author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-28 13:14:23 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-28 13:14:23 +0000
commit     73df946d56c74384511a194dd01dbe099584fd1a (patch)
tree       fd0bcea490dd81327ddfbb31e215439672c9a068 /src/text/template/parse
parent     Initial commit. (diff)
Adding upstream version 1.16.10. (upstream/1.16.10, upstream)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'src/text/template/parse')
-rw-r--r--  src/text/template/parse/lex.go          671
-rw-r--r--  src/text/template/parse/lex_test.go     557
-rw-r--r--  src/text/template/parse/node.go         972
-rw-r--r--  src/text/template/parse/parse.go        754
-rw-r--r--  src/text/template/parse/parse_test.go   652
5 files changed, 3606 insertions, 0 deletions
diff --git a/src/text/template/parse/lex.go b/src/text/template/parse/lex.go
new file mode 100644
index 0000000..6784071
--- /dev/null
+++ b/src/text/template/parse/lex.go
@@ -0,0 +1,671 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package parse
+
+import (
+ "fmt"
+ "strings"
+ "unicode"
+ "unicode/utf8"
+)
+
+// item represents a token or text string returned from the scanner.
+type item struct {
+ typ itemType // The type of this item.
+ pos Pos // The starting position, in bytes, of this item in the input string.
+ val string // The value of this item.
+ line int // The line number at the start of this item.
+}
+
+func (i item) String() string {
+ switch {
+ case i.typ == itemEOF:
+ return "EOF"
+ case i.typ == itemError:
+ return i.val
+ case i.typ > itemKeyword:
+ return fmt.Sprintf("<%s>", i.val)
+ case len(i.val) > 10:
+ return fmt.Sprintf("%.10q...", i.val)
+ }
+ return fmt.Sprintf("%q", i.val)
+}
+
+// itemType identifies the type of lex items.
+type itemType int
+
+const (
+ itemError itemType = iota // error occurred; value is text of error
+ itemBool // boolean constant
+ itemChar // printable ASCII character; grab bag for comma etc.
+ itemCharConstant // character constant
+ itemComment // comment text
+ itemComplex // complex constant (1+2i); imaginary is just a number
+ itemAssign // equals ('=') introducing an assignment
+ itemDeclare // colon-equals (':=') introducing a declaration
+ itemEOF
+ itemField // alphanumeric identifier starting with '.'
+ itemIdentifier // alphanumeric identifier not starting with '.'
+ itemLeftDelim // left action delimiter
+ itemLeftParen // '(' inside action
+ itemNumber // simple number, including imaginary
+ itemPipe // pipe symbol
+ itemRawString // raw quoted string (includes quotes)
+ itemRightDelim // right action delimiter
+ itemRightParen // ')' inside action
+ itemSpace // run of spaces separating arguments
+ itemString // quoted string (includes quotes)
+ itemText // plain text
+ itemVariable // variable starting with '$', such as '$' or '$1' or '$hello'
+ // Keywords appear after all the rest.
+ itemKeyword // used only to delimit the keywords
+ itemBlock // block keyword
+ itemDot // the cursor, spelled '.'
+ itemDefine // define keyword
+ itemElse // else keyword
+ itemEnd // end keyword
+ itemIf // if keyword
+ itemNil // the untyped nil constant, easiest to treat as a keyword
+ itemRange // range keyword
+ itemTemplate // template keyword
+ itemWith // with keyword
+)
+
+var key = map[string]itemType{
+ ".": itemDot,
+ "block": itemBlock,
+ "define": itemDefine,
+ "else": itemElse,
+ "end": itemEnd,
+ "if": itemIf,
+ "range": itemRange,
+ "nil": itemNil,
+ "template": itemTemplate,
+ "with": itemWith,
+}
+
+const eof = -1
+
+// Trimming spaces.
+// If the action begins "{{- " rather than "{{", then all space/tab/newlines
+// preceding the action are trimmed; conversely, if it ends " -}}", the
+// space/tab/newlines following the action are trimmed. This is done entirely
+// in the lexer; the parser never sees it happen. We require an ASCII space
+// character (' ', \t, \r, \n) to be present to avoid ambiguity with things
+// like "{{-3}}". It reads better with the space present anyway. For
+// simplicity, only ASCII spaces do the job.
+const (
+ spaceChars = " \t\r\n" // These are the space characters defined by Go itself.
+ trimMarker = '-' // Attached to left/right delimiter, trims trailing spaces from preceding/following text.
+ trimMarkerLen = Pos(1 + 1) // marker plus space before or after
+)
+
+// stateFn represents the state of the scanner as a function that returns the next state.
+type stateFn func(*lexer) stateFn
+
+// lexer holds the state of the scanner.
+type lexer struct {
+ name string // the name of the input; used only for error reports
+ input string // the string being scanned
+ leftDelim string // start of action
+ rightDelim string // end of action
+ emitComment bool // emit itemComment tokens.
+ pos Pos // current position in the input
+ start Pos // start position of this item
+ width Pos // width of last rune read from input
+ items chan item // channel of scanned items
+ parenDepth int // nesting depth of ( ) exprs
+ line int // 1+number of newlines seen
+ startLine int // start line of this item
+}
+
+// next returns the next rune in the input.
+func (l *lexer) next() rune {
+ if int(l.pos) >= len(l.input) {
+ l.width = 0
+ return eof
+ }
+ r, w := utf8.DecodeRuneInString(l.input[l.pos:])
+ l.width = Pos(w)
+ l.pos += l.width
+ if r == '\n' {
+ l.line++
+ }
+ return r
+}
+
+// peek returns but does not consume the next rune in the input.
+func (l *lexer) peek() rune {
+ r := l.next()
+ l.backup()
+ return r
+}
+
+// backup steps back one rune. Can only be called once per call of next.
+func (l *lexer) backup() {
+ l.pos -= l.width
+ // Correct newline count.
+ if l.width == 1 && l.input[l.pos] == '\n' {
+ l.line--
+ }
+}
+
+// emit passes an item back to the client.
+func (l *lexer) emit(t itemType) {
+ l.items <- item{t, l.start, l.input[l.start:l.pos], l.startLine}
+ l.start = l.pos
+ l.startLine = l.line
+}
+
+// ignore skips over the pending input before this point.
+func (l *lexer) ignore() {
+ l.line += strings.Count(l.input[l.start:l.pos], "\n")
+ l.start = l.pos
+ l.startLine = l.line
+}
+
+// accept consumes the next rune if it's from the valid set.
+func (l *lexer) accept(valid string) bool {
+ if strings.ContainsRune(valid, l.next()) {
+ return true
+ }
+ l.backup()
+ return false
+}
+
+// acceptRun consumes a run of runes from the valid set.
+func (l *lexer) acceptRun(valid string) {
+ for strings.ContainsRune(valid, l.next()) {
+ }
+ l.backup()
+}
+
+// errorf returns an error token and terminates the scan by passing
+// back a nil pointer that will be the next state, terminating l.nextItem.
+func (l *lexer) errorf(format string, args ...interface{}) stateFn {
+ l.items <- item{itemError, l.start, fmt.Sprintf(format, args...), l.startLine}
+ return nil
+}
+
+// nextItem returns the next item from the input.
+// Called by the parser, not in the lexing goroutine.
+func (l *lexer) nextItem() item {
+ return <-l.items
+}
+
+// drain drains the output so the lexing goroutine will exit.
+// Called by the parser, not in the lexing goroutine.
+func (l *lexer) drain() {
+ for range l.items {
+ }
+}
+
+// lex creates a new scanner for the input string.
+func lex(name, input, left, right string, emitComment bool) *lexer {
+ if left == "" {
+ left = leftDelim
+ }
+ if right == "" {
+ right = rightDelim
+ }
+ l := &lexer{
+ name: name,
+ input: input,
+ leftDelim: left,
+ rightDelim: right,
+ emitComment: emitComment,
+ items: make(chan item),
+ line: 1,
+ startLine: 1,
+ }
+ go l.run()
+ return l
+}
+
+// run runs the state machine for the lexer.
+func (l *lexer) run() {
+ for state := lexText; state != nil; {
+ state = state(l)
+ }
+ close(l.items)
+}
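The scanning work happens in the goroutine started by lex, and the parser pulls tokens over the items channel through nextItem (or drain after an error). A minimal in-package sketch of the consuming side, mirroring the collect helper in lex_test.go; the input text is illustrative:

	// Hypothetical snippet; it would have to live inside package parse,
	// since lexer and item are unexported.
	l := lex("demo", "Hello, {{.Name}}!", "", "", false)
	for {
		it := l.nextItem() // blocks until the lexing goroutine sends the next item
		fmt.Printf("%d %q\n", it.typ, it.val)
		if it.typ == itemEOF || it.typ == itemError {
			break
		}
	}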
+
+// state functions
+
+const (
+ leftDelim = "{{"
+ rightDelim = "}}"
+ leftComment = "/*"
+ rightComment = "*/"
+)
+
+// lexText scans until an opening action delimiter, "{{".
+func lexText(l *lexer) stateFn {
+ l.width = 0
+ if x := strings.Index(l.input[l.pos:], l.leftDelim); x >= 0 {
+ ldn := Pos(len(l.leftDelim))
+ l.pos += Pos(x)
+ trimLength := Pos(0)
+ if hasLeftTrimMarker(l.input[l.pos+ldn:]) {
+ trimLength = rightTrimLength(l.input[l.start:l.pos])
+ }
+ l.pos -= trimLength
+ if l.pos > l.start {
+ l.line += strings.Count(l.input[l.start:l.pos], "\n")
+ l.emit(itemText)
+ }
+ l.pos += trimLength
+ l.ignore()
+ return lexLeftDelim
+ }
+ l.pos = Pos(len(l.input))
+ // Correctly reached EOF.
+ if l.pos > l.start {
+ l.line += strings.Count(l.input[l.start:l.pos], "\n")
+ l.emit(itemText)
+ }
+ l.emit(itemEOF)
+ return nil
+}
+
+// rightTrimLength returns the length of the spaces at the end of the string.
+func rightTrimLength(s string) Pos {
+ return Pos(len(s) - len(strings.TrimRight(s, spaceChars)))
+}
+
+// atRightDelim reports whether the lexer is at a right delimiter, possibly preceded by a trim marker.
+func (l *lexer) atRightDelim() (delim, trimSpaces bool) {
+ if hasRightTrimMarker(l.input[l.pos:]) && strings.HasPrefix(l.input[l.pos+trimMarkerLen:], l.rightDelim) { // With trim marker.
+ return true, true
+ }
+ if strings.HasPrefix(l.input[l.pos:], l.rightDelim) { // Without trim marker.
+ return true, false
+ }
+ return false, false
+}
+
+// leftTrimLength returns the length of the spaces at the beginning of the string.
+func leftTrimLength(s string) Pos {
+ return Pos(len(s) - len(strings.TrimLeft(s, spaceChars)))
+}
+
+// lexLeftDelim scans the left delimiter, which is known to be present, possibly with a trim marker.
+func lexLeftDelim(l *lexer) stateFn {
+ l.pos += Pos(len(l.leftDelim))
+ trimSpace := hasLeftTrimMarker(l.input[l.pos:])
+ afterMarker := Pos(0)
+ if trimSpace {
+ afterMarker = trimMarkerLen
+ }
+ if strings.HasPrefix(l.input[l.pos+afterMarker:], leftComment) {
+ l.pos += afterMarker
+ l.ignore()
+ return lexComment
+ }
+ l.emit(itemLeftDelim)
+ l.pos += afterMarker
+ l.ignore()
+ l.parenDepth = 0
+ return lexInsideAction
+}
+
+// lexComment scans a comment. The left comment marker is known to be present.
+func lexComment(l *lexer) stateFn {
+ l.pos += Pos(len(leftComment))
+ i := strings.Index(l.input[l.pos:], rightComment)
+ if i < 0 {
+ return l.errorf("unclosed comment")
+ }
+ l.pos += Pos(i + len(rightComment))
+ delim, trimSpace := l.atRightDelim()
+ if !delim {
+ return l.errorf("comment ends before closing delimiter")
+ }
+ if l.emitComment {
+ l.emit(itemComment)
+ }
+ if trimSpace {
+ l.pos += trimMarkerLen
+ }
+ l.pos += Pos(len(l.rightDelim))
+ if trimSpace {
+ l.pos += leftTrimLength(l.input[l.pos:])
+ }
+ l.ignore()
+ return lexText
+}
+
+// lexRightDelim scans the right delimiter, which is known to be present, possibly with a trim marker.
+func lexRightDelim(l *lexer) stateFn {
+ trimSpace := hasRightTrimMarker(l.input[l.pos:])
+ if trimSpace {
+ l.pos += trimMarkerLen
+ l.ignore()
+ }
+ l.pos += Pos(len(l.rightDelim))
+ l.emit(itemRightDelim)
+ if trimSpace {
+ l.pos += leftTrimLength(l.input[l.pos:])
+ l.ignore()
+ }
+ return lexText
+}
+
+// lexInsideAction scans the elements inside action delimiters.
+func lexInsideAction(l *lexer) stateFn {
+ // Either number, quoted string, or identifier.
+ // Spaces separate arguments; runs of spaces turn into itemSpace.
+ // Pipe symbols separate and are emitted.
+ delim, _ := l.atRightDelim()
+ if delim {
+ if l.parenDepth == 0 {
+ return lexRightDelim
+ }
+ return l.errorf("unclosed left paren")
+ }
+ switch r := l.next(); {
+ case r == eof:
+ return l.errorf("unclosed action")
+ case isSpace(r):
+ l.backup() // Put space back in case we have " -}}".
+ return lexSpace
+ case r == '=':
+ l.emit(itemAssign)
+ case r == ':':
+ if l.next() != '=' {
+ return l.errorf("expected :=")
+ }
+ l.emit(itemDeclare)
+ case r == '|':
+ l.emit(itemPipe)
+ case r == '"':
+ return lexQuote
+ case r == '`':
+ return lexRawQuote
+ case r == '$':
+ return lexVariable
+ case r == '\'':
+ return lexChar
+ case r == '.':
+ // special look-ahead for ".field" so we don't break l.backup().
+ if l.pos < Pos(len(l.input)) {
+ r := l.input[l.pos]
+ if r < '0' || '9' < r {
+ return lexField
+ }
+ }
+ fallthrough // '.' can start a number.
+ case r == '+' || r == '-' || ('0' <= r && r <= '9'):
+ l.backup()
+ return lexNumber
+ case isAlphaNumeric(r):
+ l.backup()
+ return lexIdentifier
+ case r == '(':
+ l.emit(itemLeftParen)
+ l.parenDepth++
+ case r == ')':
+ l.emit(itemRightParen)
+ l.parenDepth--
+ if l.parenDepth < 0 {
+ return l.errorf("unexpected right paren %#U", r)
+ }
+ case r <= unicode.MaxASCII && unicode.IsPrint(r):
+ l.emit(itemChar)
+ default:
+ return l.errorf("unrecognized character in action: %#U", r)
+ }
+ return lexInsideAction
+}
+
+// lexSpace scans a run of space characters.
+// We have not consumed the first space, which is known to be present.
+// Take care if there is a trim-marked right delimiter, which starts with a space.
+func lexSpace(l *lexer) stateFn {
+ var r rune
+ var numSpaces int
+ for {
+ r = l.peek()
+ if !isSpace(r) {
+ break
+ }
+ l.next()
+ numSpaces++
+ }
+ // Be careful about a trim-marked closing delimiter, which has a minus
+ // after a space. We know there is a space, so check for the '-' that might follow.
+ if hasRightTrimMarker(l.input[l.pos-1:]) && strings.HasPrefix(l.input[l.pos-1+trimMarkerLen:], l.rightDelim) {
+ l.backup() // Before the space.
+ if numSpaces == 1 {
+ return lexRightDelim // On the delim, so go right to that.
+ }
+ }
+ l.emit(itemSpace)
+ return lexInsideAction
+}
+
+// lexIdentifier scans an alphanumeric.
+func lexIdentifier(l *lexer) stateFn {
+Loop:
+ for {
+ switch r := l.next(); {
+ case isAlphaNumeric(r):
+ // absorb.
+ default:
+ l.backup()
+ word := l.input[l.start:l.pos]
+ if !l.atTerminator() {
+ return l.errorf("bad character %#U", r)
+ }
+ switch {
+ case key[word] > itemKeyword:
+ l.emit(key[word])
+ case word[0] == '.':
+ l.emit(itemField)
+ case word == "true", word == "false":
+ l.emit(itemBool)
+ default:
+ l.emit(itemIdentifier)
+ }
+ break Loop
+ }
+ }
+ return lexInsideAction
+}
+
+// lexField scans a field: .Alphanumeric.
+// The . has been scanned.
+func lexField(l *lexer) stateFn {
+ return lexFieldOrVariable(l, itemField)
+}
+
+// lexVariable scans a Variable: $Alphanumeric.
+// The $ has been scanned.
+func lexVariable(l *lexer) stateFn {
+ if l.atTerminator() { // Nothing interesting follows -> "$".
+ l.emit(itemVariable)
+ return lexInsideAction
+ }
+ return lexFieldOrVariable(l, itemVariable)
+}
+
+// lexFieldOrVariable scans a field or variable: [.$]Alphanumeric.
+// The . or $ has been scanned.
+func lexFieldOrVariable(l *lexer, typ itemType) stateFn {
+ if l.atTerminator() { // Nothing interesting follows -> "." or "$".
+ if typ == itemVariable {
+ l.emit(itemVariable)
+ } else {
+ l.emit(itemDot)
+ }
+ return lexInsideAction
+ }
+ var r rune
+ for {
+ r = l.next()
+ if !isAlphaNumeric(r) {
+ l.backup()
+ break
+ }
+ }
+ if !l.atTerminator() {
+ return l.errorf("bad character %#U", r)
+ }
+ l.emit(typ)
+ return lexInsideAction
+}
+
+// atTerminator reports whether the input is at a valid termination character to
+// appear after an identifier. Breaks .X.Y into two pieces. Also catches cases
+// like "$x+2" not being acceptable without a space, in case we decide one
+// day to implement arithmetic.
+func (l *lexer) atTerminator() bool {
+ r := l.peek()
+ if isSpace(r) {
+ return true
+ }
+ switch r {
+ case eof, '.', ',', '|', ':', ')', '(':
+ return true
+ }
+ // Does r start the delimiter? This can be ambiguous (with delim=="//", $x/2 will
+ // succeed but should fail) but only in extremely rare cases caused by willfully
+ // bad choice of delimiter.
+ if rd, _ := utf8.DecodeRuneInString(l.rightDelim); rd == r {
+ return true
+ }
+ return false
+}
+
+// lexChar scans a character constant. The initial quote is already
+// scanned. Syntax checking is done by the parser.
+func lexChar(l *lexer) stateFn {
+Loop:
+ for {
+ switch l.next() {
+ case '\\':
+ if r := l.next(); r != eof && r != '\n' {
+ break
+ }
+ fallthrough
+ case eof, '\n':
+ return l.errorf("unterminated character constant")
+ case '\'':
+ break Loop
+ }
+ }
+ l.emit(itemCharConstant)
+ return lexInsideAction
+}
+
+// lexNumber scans a number: decimal, octal, hex, float, or imaginary. This
+// isn't a perfect number scanner - for instance it accepts "." and "0x0.2"
+// and "089" - but when it's wrong the input is invalid and the parser (via
+// strconv) will notice.
+func lexNumber(l *lexer) stateFn {
+ if !l.scanNumber() {
+ return l.errorf("bad number syntax: %q", l.input[l.start:l.pos])
+ }
+ if sign := l.peek(); sign == '+' || sign == '-' {
+ // Complex: 1+2i. No spaces, must end in 'i'.
+ if !l.scanNumber() || l.input[l.pos-1] != 'i' {
+ return l.errorf("bad number syntax: %q", l.input[l.start:l.pos])
+ }
+ l.emit(itemComplex)
+ } else {
+ l.emit(itemNumber)
+ }
+ return lexInsideAction
+}
+
+func (l *lexer) scanNumber() bool {
+ // Optional leading sign.
+ l.accept("+-")
+ // Is it hex?
+ digits := "0123456789_"
+ if l.accept("0") {
+ // Note: Leading 0 does not mean octal in floats.
+ if l.accept("xX") {
+ digits = "0123456789abcdefABCDEF_"
+ } else if l.accept("oO") {
+ digits = "01234567_"
+ } else if l.accept("bB") {
+ digits = "01_"
+ }
+ }
+ l.acceptRun(digits)
+ if l.accept(".") {
+ l.acceptRun(digits)
+ }
+ if len(digits) == 10+1 && l.accept("eE") {
+ l.accept("+-")
+ l.acceptRun("0123456789_")
+ }
+ if len(digits) == 16+6+1 && l.accept("pP") {
+ l.accept("+-")
+ l.acceptRun("0123456789_")
+ }
+ // Is it imaginary?
+ l.accept("i")
+ // Next thing mustn't be alphanumeric.
+ if isAlphaNumeric(l.peek()) {
+ l.next()
+ return false
+ }
+ return true
+}
+
+// lexQuote scans a quoted string.
+func lexQuote(l *lexer) stateFn {
+Loop:
+ for {
+ switch l.next() {
+ case '\\':
+ if r := l.next(); r != eof && r != '\n' {
+ break
+ }
+ fallthrough
+ case eof, '\n':
+ return l.errorf("unterminated quoted string")
+ case '"':
+ break Loop
+ }
+ }
+ l.emit(itemString)
+ return lexInsideAction
+}
+
+// lexRawQuote scans a raw quoted string.
+func lexRawQuote(l *lexer) stateFn {
+Loop:
+ for {
+ switch l.next() {
+ case eof:
+ return l.errorf("unterminated raw quoted string")
+ case '`':
+ break Loop
+ }
+ }
+ l.emit(itemRawString)
+ return lexInsideAction
+}
+
+// isSpace reports whether r is a space character.
+func isSpace(r rune) bool {
+ return r == ' ' || r == '\t' || r == '\r' || r == '\n'
+}
+
+// isAlphaNumeric reports whether r is an alphabetic, digit, or underscore.
+func isAlphaNumeric(r rune) bool {
+ return r == '_' || unicode.IsLetter(r) || unicode.IsDigit(r)
+}
+
+func hasLeftTrimMarker(s string) bool {
+ return len(s) >= 2 && s[0] == trimMarker && isSpace(rune(s[1]))
+}
+
+func hasRightTrimMarker(s string) bool {
+ return len(s) >= 2 && isSpace(rune(s[0])) && s[1] == trimMarker
+}
diff --git a/src/text/template/parse/lex_test.go b/src/text/template/parse/lex_test.go
new file mode 100644
index 0000000..6510eed
--- /dev/null
+++ b/src/text/template/parse/lex_test.go
@@ -0,0 +1,557 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package parse
+
+import (
+ "fmt"
+ "testing"
+)
+
+// Make the types prettyprint.
+var itemName = map[itemType]string{
+ itemError: "error",
+ itemBool: "bool",
+ itemChar: "char",
+ itemCharConstant: "charconst",
+ itemComment: "comment",
+ itemComplex: "complex",
+ itemDeclare: ":=",
+ itemEOF: "EOF",
+ itemField: "field",
+ itemIdentifier: "identifier",
+ itemLeftDelim: "left delim",
+ itemLeftParen: "(",
+ itemNumber: "number",
+ itemPipe: "pipe",
+ itemRawString: "raw string",
+ itemRightDelim: "right delim",
+ itemRightParen: ")",
+ itemSpace: "space",
+ itemString: "string",
+ itemVariable: "variable",
+
+ // keywords
+ itemDot: ".",
+ itemBlock: "block",
+ itemDefine: "define",
+ itemElse: "else",
+ itemIf: "if",
+ itemEnd: "end",
+ itemNil: "nil",
+ itemRange: "range",
+ itemTemplate: "template",
+ itemWith: "with",
+}
+
+func (i itemType) String() string {
+ s := itemName[i]
+ if s == "" {
+ return fmt.Sprintf("item%d", int(i))
+ }
+ return s
+}
+
+type lexTest struct {
+ name string
+ input string
+ items []item
+}
+
+func mkItem(typ itemType, text string) item {
+ return item{
+ typ: typ,
+ val: text,
+ }
+}
+
+var (
+ tDot = mkItem(itemDot, ".")
+ tBlock = mkItem(itemBlock, "block")
+ tEOF = mkItem(itemEOF, "")
+ tFor = mkItem(itemIdentifier, "for")
+ tLeft = mkItem(itemLeftDelim, "{{")
+ tLpar = mkItem(itemLeftParen, "(")
+ tPipe = mkItem(itemPipe, "|")
+ tQuote = mkItem(itemString, `"abc \n\t\" "`)
+ tRange = mkItem(itemRange, "range")
+ tRight = mkItem(itemRightDelim, "}}")
+ tRpar = mkItem(itemRightParen, ")")
+ tSpace = mkItem(itemSpace, " ")
+ raw = "`" + `abc\n\t\" ` + "`"
+ rawNL = "`now is{{\n}}the time`" // Contains newline inside raw quote.
+ tRawQuote = mkItem(itemRawString, raw)
+ tRawQuoteNL = mkItem(itemRawString, rawNL)
+)
+
+var lexTests = []lexTest{
+ {"empty", "", []item{tEOF}},
+ {"spaces", " \t\n", []item{mkItem(itemText, " \t\n"), tEOF}},
+ {"text", `now is the time`, []item{mkItem(itemText, "now is the time"), tEOF}},
+ {"text with comment", "hello-{{/* this is a comment */}}-world", []item{
+ mkItem(itemText, "hello-"),
+ mkItem(itemComment, "/* this is a comment */"),
+ mkItem(itemText, "-world"),
+ tEOF,
+ }},
+ {"punctuation", "{{,@% }}", []item{
+ tLeft,
+ mkItem(itemChar, ","),
+ mkItem(itemChar, "@"),
+ mkItem(itemChar, "%"),
+ tSpace,
+ tRight,
+ tEOF,
+ }},
+ {"parens", "{{((3))}}", []item{
+ tLeft,
+ tLpar,
+ tLpar,
+ mkItem(itemNumber, "3"),
+ tRpar,
+ tRpar,
+ tRight,
+ tEOF,
+ }},
+ {"empty action", `{{}}`, []item{tLeft, tRight, tEOF}},
+ {"for", `{{for}}`, []item{tLeft, tFor, tRight, tEOF}},
+ {"block", `{{block "foo" .}}`, []item{
+ tLeft, tBlock, tSpace, mkItem(itemString, `"foo"`), tSpace, tDot, tRight, tEOF,
+ }},
+ {"quote", `{{"abc \n\t\" "}}`, []item{tLeft, tQuote, tRight, tEOF}},
+ {"raw quote", "{{" + raw + "}}", []item{tLeft, tRawQuote, tRight, tEOF}},
+ {"raw quote with newline", "{{" + rawNL + "}}", []item{tLeft, tRawQuoteNL, tRight, tEOF}},
+ {"numbers", "{{1 02 0x14 0X14 -7.2i 1e3 1E3 +1.2e-4 4.2i 1+2i 1_2 0x1.e_fp4 0X1.E_FP4}}", []item{
+ tLeft,
+ mkItem(itemNumber, "1"),
+ tSpace,
+ mkItem(itemNumber, "02"),
+ tSpace,
+ mkItem(itemNumber, "0x14"),
+ tSpace,
+ mkItem(itemNumber, "0X14"),
+ tSpace,
+ mkItem(itemNumber, "-7.2i"),
+ tSpace,
+ mkItem(itemNumber, "1e3"),
+ tSpace,
+ mkItem(itemNumber, "1E3"),
+ tSpace,
+ mkItem(itemNumber, "+1.2e-4"),
+ tSpace,
+ mkItem(itemNumber, "4.2i"),
+ tSpace,
+ mkItem(itemComplex, "1+2i"),
+ tSpace,
+ mkItem(itemNumber, "1_2"),
+ tSpace,
+ mkItem(itemNumber, "0x1.e_fp4"),
+ tSpace,
+ mkItem(itemNumber, "0X1.E_FP4"),
+ tRight,
+ tEOF,
+ }},
+ {"characters", `{{'a' '\n' '\'' '\\' '\u00FF' '\xFF' '本'}}`, []item{
+ tLeft,
+ mkItem(itemCharConstant, `'a'`),
+ tSpace,
+ mkItem(itemCharConstant, `'\n'`),
+ tSpace,
+ mkItem(itemCharConstant, `'\''`),
+ tSpace,
+ mkItem(itemCharConstant, `'\\'`),
+ tSpace,
+ mkItem(itemCharConstant, `'\u00FF'`),
+ tSpace,
+ mkItem(itemCharConstant, `'\xFF'`),
+ tSpace,
+ mkItem(itemCharConstant, `'本'`),
+ tRight,
+ tEOF,
+ }},
+ {"bools", "{{true false}}", []item{
+ tLeft,
+ mkItem(itemBool, "true"),
+ tSpace,
+ mkItem(itemBool, "false"),
+ tRight,
+ tEOF,
+ }},
+ {"dot", "{{.}}", []item{
+ tLeft,
+ tDot,
+ tRight,
+ tEOF,
+ }},
+ {"nil", "{{nil}}", []item{
+ tLeft,
+ mkItem(itemNil, "nil"),
+ tRight,
+ tEOF,
+ }},
+ {"dots", "{{.x . .2 .x.y.z}}", []item{
+ tLeft,
+ mkItem(itemField, ".x"),
+ tSpace,
+ tDot,
+ tSpace,
+ mkItem(itemNumber, ".2"),
+ tSpace,
+ mkItem(itemField, ".x"),
+ mkItem(itemField, ".y"),
+ mkItem(itemField, ".z"),
+ tRight,
+ tEOF,
+ }},
+ {"keywords", "{{range if else end with}}", []item{
+ tLeft,
+ mkItem(itemRange, "range"),
+ tSpace,
+ mkItem(itemIf, "if"),
+ tSpace,
+ mkItem(itemElse, "else"),
+ tSpace,
+ mkItem(itemEnd, "end"),
+ tSpace,
+ mkItem(itemWith, "with"),
+ tRight,
+ tEOF,
+ }},
+ {"variables", "{{$c := printf $ $hello $23 $ $var.Field .Method}}", []item{
+ tLeft,
+ mkItem(itemVariable, "$c"),
+ tSpace,
+ mkItem(itemDeclare, ":="),
+ tSpace,
+ mkItem(itemIdentifier, "printf"),
+ tSpace,
+ mkItem(itemVariable, "$"),
+ tSpace,
+ mkItem(itemVariable, "$hello"),
+ tSpace,
+ mkItem(itemVariable, "$23"),
+ tSpace,
+ mkItem(itemVariable, "$"),
+ tSpace,
+ mkItem(itemVariable, "$var"),
+ mkItem(itemField, ".Field"),
+ tSpace,
+ mkItem(itemField, ".Method"),
+ tRight,
+ tEOF,
+ }},
+ {"variable invocation", "{{$x 23}}", []item{
+ tLeft,
+ mkItem(itemVariable, "$x"),
+ tSpace,
+ mkItem(itemNumber, "23"),
+ tRight,
+ tEOF,
+ }},
+ {"pipeline", `intro {{echo hi 1.2 |noargs|args 1 "hi"}} outro`, []item{
+ mkItem(itemText, "intro "),
+ tLeft,
+ mkItem(itemIdentifier, "echo"),
+ tSpace,
+ mkItem(itemIdentifier, "hi"),
+ tSpace,
+ mkItem(itemNumber, "1.2"),
+ tSpace,
+ tPipe,
+ mkItem(itemIdentifier, "noargs"),
+ tPipe,
+ mkItem(itemIdentifier, "args"),
+ tSpace,
+ mkItem(itemNumber, "1"),
+ tSpace,
+ mkItem(itemString, `"hi"`),
+ tRight,
+ mkItem(itemText, " outro"),
+ tEOF,
+ }},
+ {"declaration", "{{$v := 3}}", []item{
+ tLeft,
+ mkItem(itemVariable, "$v"),
+ tSpace,
+ mkItem(itemDeclare, ":="),
+ tSpace,
+ mkItem(itemNumber, "3"),
+ tRight,
+ tEOF,
+ }},
+ {"2 declarations", "{{$v , $w := 3}}", []item{
+ tLeft,
+ mkItem(itemVariable, "$v"),
+ tSpace,
+ mkItem(itemChar, ","),
+ tSpace,
+ mkItem(itemVariable, "$w"),
+ tSpace,
+ mkItem(itemDeclare, ":="),
+ tSpace,
+ mkItem(itemNumber, "3"),
+ tRight,
+ tEOF,
+ }},
+ {"field of parenthesized expression", "{{(.X).Y}}", []item{
+ tLeft,
+ tLpar,
+ mkItem(itemField, ".X"),
+ tRpar,
+ mkItem(itemField, ".Y"),
+ tRight,
+ tEOF,
+ }},
+ {"trimming spaces before and after", "hello- {{- 3 -}} -world", []item{
+ mkItem(itemText, "hello-"),
+ tLeft,
+ mkItem(itemNumber, "3"),
+ tRight,
+ mkItem(itemText, "-world"),
+ tEOF,
+ }},
+ {"trimming spaces before and after comment", "hello- {{- /* hello */ -}} -world", []item{
+ mkItem(itemText, "hello-"),
+ mkItem(itemComment, "/* hello */"),
+ mkItem(itemText, "-world"),
+ tEOF,
+ }},
+ // errors
+ {"badchar", "#{{\x01}}", []item{
+ mkItem(itemText, "#"),
+ tLeft,
+ mkItem(itemError, "unrecognized character in action: U+0001"),
+ }},
+ {"unclosed action", "{{", []item{
+ tLeft,
+ mkItem(itemError, "unclosed action"),
+ }},
+ {"EOF in action", "{{range", []item{
+ tLeft,
+ tRange,
+ mkItem(itemError, "unclosed action"),
+ }},
+ {"unclosed quote", "{{\"\n\"}}", []item{
+ tLeft,
+ mkItem(itemError, "unterminated quoted string"),
+ }},
+ {"unclosed raw quote", "{{`xx}}", []item{
+ tLeft,
+ mkItem(itemError, "unterminated raw quoted string"),
+ }},
+ {"unclosed char constant", "{{'\n}}", []item{
+ tLeft,
+ mkItem(itemError, "unterminated character constant"),
+ }},
+ {"bad number", "{{3k}}", []item{
+ tLeft,
+ mkItem(itemError, `bad number syntax: "3k"`),
+ }},
+ {"unclosed paren", "{{(3}}", []item{
+ tLeft,
+ tLpar,
+ mkItem(itemNumber, "3"),
+ mkItem(itemError, `unclosed left paren`),
+ }},
+ {"extra right paren", "{{3)}}", []item{
+ tLeft,
+ mkItem(itemNumber, "3"),
+ tRpar,
+ mkItem(itemError, `unexpected right paren U+0029 ')'`),
+ }},
+
+ // Fixed bugs
+ // Many elements in an action blew the lookahead until
+ // we made lexInsideAction not loop.
+ {"long pipeline deadlock", "{{|||||}}", []item{
+ tLeft,
+ tPipe,
+ tPipe,
+ tPipe,
+ tPipe,
+ tPipe,
+ tRight,
+ tEOF,
+ }},
+ {"text with bad comment", "hello-{{/*/}}-world", []item{
+ mkItem(itemText, "hello-"),
+ mkItem(itemError, `unclosed comment`),
+ }},
+ {"text with comment close separated from delim", "hello-{{/* */ }}-world", []item{
+ mkItem(itemText, "hello-"),
+ mkItem(itemError, `comment ends before closing delimiter`),
+ }},
+ // This one is an error that we can't catch because it breaks templates with
+ // minimized JavaScript. Should have fixed it before Go 1.1.
+ {"unmatched right delimiter", "hello-{.}}-world", []item{
+ mkItem(itemText, "hello-{.}}-world"),
+ tEOF,
+ }},
+}
+
+// collect gathers the emitted items into a slice.
+func collect(t *lexTest, left, right string) (items []item) {
+ l := lex(t.name, t.input, left, right, true)
+ for {
+ item := l.nextItem()
+ items = append(items, item)
+ if item.typ == itemEOF || item.typ == itemError {
+ break
+ }
+ }
+ return
+}
+
+func equal(i1, i2 []item, checkPos bool) bool {
+ if len(i1) != len(i2) {
+ return false
+ }
+ for k := range i1 {
+ if i1[k].typ != i2[k].typ {
+ return false
+ }
+ if i1[k].val != i2[k].val {
+ return false
+ }
+ if checkPos && i1[k].pos != i2[k].pos {
+ return false
+ }
+ if checkPos && i1[k].line != i2[k].line {
+ return false
+ }
+ }
+ return true
+}
+
+func TestLex(t *testing.T) {
+ for _, test := range lexTests {
+ items := collect(&test, "", "")
+ if !equal(items, test.items, false) {
+ t.Errorf("%s: got\n\t%+v\nexpected\n\t%v", test.name, items, test.items)
+ }
+ }
+}
+
+// Some easy cases from above, but with delimiters $$ and @@
+var lexDelimTests = []lexTest{
+ {"punctuation", "$$,@%{{}}@@", []item{
+ tLeftDelim,
+ mkItem(itemChar, ","),
+ mkItem(itemChar, "@"),
+ mkItem(itemChar, "%"),
+ mkItem(itemChar, "{"),
+ mkItem(itemChar, "{"),
+ mkItem(itemChar, "}"),
+ mkItem(itemChar, "}"),
+ tRightDelim,
+ tEOF,
+ }},
+ {"empty action", `$$@@`, []item{tLeftDelim, tRightDelim, tEOF}},
+ {"for", `$$for@@`, []item{tLeftDelim, tFor, tRightDelim, tEOF}},
+ {"quote", `$$"abc \n\t\" "@@`, []item{tLeftDelim, tQuote, tRightDelim, tEOF}},
+ {"raw quote", "$$" + raw + "@@", []item{tLeftDelim, tRawQuote, tRightDelim, tEOF}},
+}
+
+var (
+ tLeftDelim = mkItem(itemLeftDelim, "$$")
+ tRightDelim = mkItem(itemRightDelim, "@@")
+)
+
+func TestDelims(t *testing.T) {
+ for _, test := range lexDelimTests {
+ items := collect(&test, "$$", "@@")
+ if !equal(items, test.items, false) {
+ t.Errorf("%s: got\n\t%v\nexpected\n\t%v", test.name, items, test.items)
+ }
+ }
+}
+
+var lexPosTests = []lexTest{
+ {"empty", "", []item{{itemEOF, 0, "", 1}}},
+ {"punctuation", "{{,@%#}}", []item{
+ {itemLeftDelim, 0, "{{", 1},
+ {itemChar, 2, ",", 1},
+ {itemChar, 3, "@", 1},
+ {itemChar, 4, "%", 1},
+ {itemChar, 5, "#", 1},
+ {itemRightDelim, 6, "}}", 1},
+ {itemEOF, 8, "", 1},
+ }},
+ {"sample", "0123{{hello}}xyz", []item{
+ {itemText, 0, "0123", 1},
+ {itemLeftDelim, 4, "{{", 1},
+ {itemIdentifier, 6, "hello", 1},
+ {itemRightDelim, 11, "}}", 1},
+ {itemText, 13, "xyz", 1},
+ {itemEOF, 16, "", 1},
+ }},
+ {"trimafter", "{{x -}}\n{{y}}", []item{
+ {itemLeftDelim, 0, "{{", 1},
+ {itemIdentifier, 2, "x", 1},
+ {itemRightDelim, 5, "}}", 1},
+ {itemLeftDelim, 8, "{{", 2},
+ {itemIdentifier, 10, "y", 2},
+ {itemRightDelim, 11, "}}", 2},
+ {itemEOF, 13, "", 2},
+ }},
+ {"trimbefore", "{{x}}\n{{- y}}", []item{
+ {itemLeftDelim, 0, "{{", 1},
+ {itemIdentifier, 2, "x", 1},
+ {itemRightDelim, 3, "}}", 1},
+ {itemLeftDelim, 6, "{{", 2},
+ {itemIdentifier, 10, "y", 2},
+ {itemRightDelim, 11, "}}", 2},
+ {itemEOF, 13, "", 2},
+ }},
+}
+
+// The other tests don't check position, to make the test cases easier to construct.
+// This one does.
+func TestPos(t *testing.T) {
+ for _, test := range lexPosTests {
+ items := collect(&test, "", "")
+ if !equal(items, test.items, true) {
+ t.Errorf("%s: got\n\t%v\nexpected\n\t%v", test.name, items, test.items)
+ if len(items) == len(test.items) {
+ // Detailed print; avoid item.String() to expose the position value.
+ for i := range items {
+ if !equal(items[i:i+1], test.items[i:i+1], true) {
+ i1 := items[i]
+ i2 := test.items[i]
+ t.Errorf("\t#%d: got {%v %d %q %d} expected {%v %d %q %d}",
+ i, i1.typ, i1.pos, i1.val, i1.line, i2.typ, i2.pos, i2.val, i2.line)
+ }
+ }
+ }
+ }
+ }
+}
+
+// Test that an error shuts down the lexing goroutine.
+func TestShutdown(t *testing.T) {
+ // We need to duplicate template.Parse here to hold on to the lexer.
+ const text = "erroneous{{define}}{{else}}1234"
+ lexer := lex("foo", text, "{{", "}}", false)
+ _, err := New("root").parseLexer(lexer)
+ if err == nil {
+ t.Fatalf("expected error")
+ }
+ // The error should have drained the input. Therefore, the lexer should be shut down.
+ token, ok := <-lexer.items
+ if ok {
+ t.Fatalf("input was not drained; got %v", token)
+ }
+}
+
+// parseLexer is a local version of parse that lets us pass in the lexer instead of building it.
+// We expect an error, so the tree set and funcs list are explicitly nil.
+func (t *Tree) parseLexer(lex *lexer) (tree *Tree, err error) {
+ defer t.recover(&err)
+ t.ParseName = t.Name
+ t.startParse(nil, lex, map[string]*Tree{})
+ t.parse()
+ t.add()
+ t.stopParse()
+ return t, nil
+}
diff --git a/src/text/template/parse/node.go b/src/text/template/parse/node.go
new file mode 100644
index 0000000..177482f
--- /dev/null
+++ b/src/text/template/parse/node.go
@@ -0,0 +1,972 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Parse nodes.
+
+package parse
+
+import (
+ "fmt"
+ "strconv"
+ "strings"
+)
+
+var textFormat = "%s" // Changed to "%q" in tests for better error messages.
+
+// A Node is an element in the parse tree. The interface is trivial.
+// The interface contains an unexported method so that only
+// types local to this package can satisfy it.
+type Node interface {
+ Type() NodeType
+ String() string
+ // Copy does a deep copy of the Node and all its components.
+ // To avoid type assertions, some XxxNodes also have specialized
+ // CopyXxx methods that return *XxxNode.
+ Copy() Node
+ Position() Pos // byte position of start of node in full original input string
+ // tree returns the containing *Tree.
+ // It is unexported so all implementations of Node are in this package.
+ tree() *Tree
+ // writeTo writes the String output to the builder.
+ writeTo(*strings.Builder)
+}
+
+// NodeType identifies the type of a parse tree node.
+type NodeType int
+
+// Pos represents a byte position in the original input text from which
+// this template was parsed.
+type Pos int
+
+func (p Pos) Position() Pos {
+ return p
+}
+
+// Type returns itself and provides an easy default implementation
+// for embedding in a Node. Embedded in all non-trivial Nodes.
+func (t NodeType) Type() NodeType {
+ return t
+}
+
+const (
+ NodeText NodeType = iota // Plain text.
+ NodeAction // A non-control action such as a field evaluation.
+ NodeBool // A boolean constant.
+ NodeChain // A sequence of field accesses.
+ NodeCommand // An element of a pipeline.
+ NodeDot // The cursor, dot.
+ nodeElse // An else action. Not added to tree.
+ nodeEnd // An end action. Not added to tree.
+ NodeField // A field or method name.
+ NodeIdentifier // An identifier; always a function name.
+ NodeIf // An if action.
+ NodeList // A list of Nodes.
+ NodeNil // An untyped nil constant.
+ NodeNumber // A numerical constant.
+ NodePipe // A pipeline of commands.
+ NodeRange // A range action.
+ NodeString // A string constant.
+ NodeTemplate // A template invocation action.
+ NodeVariable // A $ variable.
+ NodeWith // A with action.
+ NodeComment // A comment.
+)
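Each concrete node embeds NodeType and Pos and satisfies the Node interface above, so client code normally dispatches with a type switch. A hedged sketch of walking a parsed tree using only the exported API shown in this diff; walk and the template text are illustrative, not part of the package:

package main

import (
	"fmt"
	"text/template/parse"
)

// walk prints a rough outline of a parse tree.
func walk(n parse.Node) {
	switch n := n.(type) {
	case *parse.ListNode:
		for _, c := range n.Nodes {
			walk(c)
		}
	case *parse.TextNode:
		fmt.Printf("text   %q\n", n.Text)
	case *parse.ActionNode:
		fmt.Printf("action %s\n", n)
	default:
		fmt.Printf("%T %s\n", n, n)
	}
}

func main() {
	trees, err := parse.Parse("demo", "Hello, {{.Name}}!", "", "")
	if err != nil {
		panic(err)
	}
	walk(trees["demo"].Root)
}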
+
+// Nodes.
+
+// ListNode holds a sequence of nodes.
+type ListNode struct {
+ NodeType
+ Pos
+ tr *Tree
+ Nodes []Node // The element nodes in lexical order.
+}
+
+func (t *Tree) newList(pos Pos) *ListNode {
+ return &ListNode{tr: t, NodeType: NodeList, Pos: pos}
+}
+
+func (l *ListNode) append(n Node) {
+ l.Nodes = append(l.Nodes, n)
+}
+
+func (l *ListNode) tree() *Tree {
+ return l.tr
+}
+
+func (l *ListNode) String() string {
+ var sb strings.Builder
+ l.writeTo(&sb)
+ return sb.String()
+}
+
+func (l *ListNode) writeTo(sb *strings.Builder) {
+ for _, n := range l.Nodes {
+ n.writeTo(sb)
+ }
+}
+
+func (l *ListNode) CopyList() *ListNode {
+ if l == nil {
+ return l
+ }
+ n := l.tr.newList(l.Pos)
+ for _, elem := range l.Nodes {
+ n.append(elem.Copy())
+ }
+ return n
+}
+
+func (l *ListNode) Copy() Node {
+ return l.CopyList()
+}
+
+// TextNode holds plain text.
+type TextNode struct {
+ NodeType
+ Pos
+ tr *Tree
+ Text []byte // The text; may span newlines.
+}
+
+func (t *Tree) newText(pos Pos, text string) *TextNode {
+ return &TextNode{tr: t, NodeType: NodeText, Pos: pos, Text: []byte(text)}
+}
+
+func (t *TextNode) String() string {
+ return fmt.Sprintf(textFormat, t.Text)
+}
+
+func (t *TextNode) writeTo(sb *strings.Builder) {
+ sb.WriteString(t.String())
+}
+
+func (t *TextNode) tree() *Tree {
+ return t.tr
+}
+
+func (t *TextNode) Copy() Node {
+ return &TextNode{tr: t.tr, NodeType: NodeText, Pos: t.Pos, Text: append([]byte{}, t.Text...)}
+}
+
+// CommentNode holds a comment.
+type CommentNode struct {
+ NodeType
+ Pos
+ tr *Tree
+ Text string // Comment text.
+}
+
+func (t *Tree) newComment(pos Pos, text string) *CommentNode {
+ return &CommentNode{tr: t, NodeType: NodeComment, Pos: pos, Text: text}
+}
+
+func (c *CommentNode) String() string {
+ var sb strings.Builder
+ c.writeTo(&sb)
+ return sb.String()
+}
+
+func (c *CommentNode) writeTo(sb *strings.Builder) {
+ sb.WriteString("{{")
+ sb.WriteString(c.Text)
+ sb.WriteString("}}")
+}
+
+func (c *CommentNode) tree() *Tree {
+ return c.tr
+}
+
+func (c *CommentNode) Copy() Node {
+ return &CommentNode{tr: c.tr, NodeType: NodeComment, Pos: c.Pos, Text: c.Text}
+}
+
+// PipeNode holds a pipeline with an optional declaration.
+type PipeNode struct {
+ NodeType
+ Pos
+ tr *Tree
+ Line int // The line number in the input. Deprecated: Kept for compatibility.
+ IsAssign bool // The variables are being assigned, not declared.
+ Decl []*VariableNode // Variables in lexical order.
+ Cmds []*CommandNode // The commands in lexical order.
+}
+
+func (t *Tree) newPipeline(pos Pos, line int, vars []*VariableNode) *PipeNode {
+ return &PipeNode{tr: t, NodeType: NodePipe, Pos: pos, Line: line, Decl: vars}
+}
+
+func (p *PipeNode) append(command *CommandNode) {
+ p.Cmds = append(p.Cmds, command)
+}
+
+func (p *PipeNode) String() string {
+ var sb strings.Builder
+ p.writeTo(&sb)
+ return sb.String()
+}
+
+func (p *PipeNode) writeTo(sb *strings.Builder) {
+ if len(p.Decl) > 0 {
+ for i, v := range p.Decl {
+ if i > 0 {
+ sb.WriteString(", ")
+ }
+ v.writeTo(sb)
+ }
+ sb.WriteString(" := ")
+ }
+ for i, c := range p.Cmds {
+ if i > 0 {
+ sb.WriteString(" | ")
+ }
+ c.writeTo(sb)
+ }
+}
+
+func (p *PipeNode) tree() *Tree {
+ return p.tr
+}
+
+func (p *PipeNode) CopyPipe() *PipeNode {
+ if p == nil {
+ return p
+ }
+ vars := make([]*VariableNode, len(p.Decl))
+ for i, d := range p.Decl {
+ vars[i] = d.Copy().(*VariableNode)
+ }
+ n := p.tr.newPipeline(p.Pos, p.Line, vars)
+ n.IsAssign = p.IsAssign
+ for _, c := range p.Cmds {
+ n.append(c.Copy().(*CommandNode))
+ }
+ return n
+}
+
+func (p *PipeNode) Copy() Node {
+ return p.CopyPipe()
+}
+
+// ActionNode holds an action (something bounded by delimiters).
+// Control actions have their own nodes; ActionNode represents simple
+// ones such as field evaluations and parenthesized pipelines.
+type ActionNode struct {
+ NodeType
+ Pos
+ tr *Tree
+ Line int // The line number in the input. Deprecated: Kept for compatibility.
+ Pipe *PipeNode // The pipeline in the action.
+}
+
+func (t *Tree) newAction(pos Pos, line int, pipe *PipeNode) *ActionNode {
+ return &ActionNode{tr: t, NodeType: NodeAction, Pos: pos, Line: line, Pipe: pipe}
+}
+
+func (a *ActionNode) String() string {
+ var sb strings.Builder
+ a.writeTo(&sb)
+ return sb.String()
+}
+
+func (a *ActionNode) writeTo(sb *strings.Builder) {
+ sb.WriteString("{{")
+ a.Pipe.writeTo(sb)
+ sb.WriteString("}}")
+}
+
+func (a *ActionNode) tree() *Tree {
+ return a.tr
+}
+
+func (a *ActionNode) Copy() Node {
+ return a.tr.newAction(a.Pos, a.Line, a.Pipe.CopyPipe())
+
+}
+
+// CommandNode holds a command (a pipeline inside an evaluating action).
+type CommandNode struct {
+ NodeType
+ Pos
+ tr *Tree
+ Args []Node // Arguments in lexical order: Identifier, field, or constant.
+}
+
+func (t *Tree) newCommand(pos Pos) *CommandNode {
+ return &CommandNode{tr: t, NodeType: NodeCommand, Pos: pos}
+}
+
+func (c *CommandNode) append(arg Node) {
+ c.Args = append(c.Args, arg)
+}
+
+func (c *CommandNode) String() string {
+ var sb strings.Builder
+ c.writeTo(&sb)
+ return sb.String()
+}
+
+func (c *CommandNode) writeTo(sb *strings.Builder) {
+ for i, arg := range c.Args {
+ if i > 0 {
+ sb.WriteByte(' ')
+ }
+ if arg, ok := arg.(*PipeNode); ok {
+ sb.WriteByte('(')
+ arg.writeTo(sb)
+ sb.WriteByte(')')
+ continue
+ }
+ arg.writeTo(sb)
+ }
+}
+
+func (c *CommandNode) tree() *Tree {
+ return c.tr
+}
+
+func (c *CommandNode) Copy() Node {
+ if c == nil {
+ return c
+ }
+ n := c.tr.newCommand(c.Pos)
+ for _, c := range c.Args {
+ n.append(c.Copy())
+ }
+ return n
+}
+
+// IdentifierNode holds an identifier.
+type IdentifierNode struct {
+ NodeType
+ Pos
+ tr *Tree
+ Ident string // The identifier's name.
+}
+
+// NewIdentifier returns a new IdentifierNode with the given identifier name.
+func NewIdentifier(ident string) *IdentifierNode {
+ return &IdentifierNode{NodeType: NodeIdentifier, Ident: ident}
+}
+
+// SetPos sets the position. NewIdentifier is a public method so we can't modify its signature.
+// Chained for convenience.
+// TODO: fix one day?
+func (i *IdentifierNode) SetPos(pos Pos) *IdentifierNode {
+ i.Pos = pos
+ return i
+}
+
+// SetTree sets the parent tree for the node. NewIdentifier is a public method so we can't modify its signature.
+// Chained for convenience.
+// TODO: fix one day?
+func (i *IdentifierNode) SetTree(t *Tree) *IdentifierNode {
+ i.tr = t
+ return i
+}
+
+func (i *IdentifierNode) String() string {
+ return i.Ident
+}
+
+func (i *IdentifierNode) writeTo(sb *strings.Builder) {
+ sb.WriteString(i.String())
+}
+
+func (i *IdentifierNode) tree() *Tree {
+ return i.tr
+}
+
+func (i *IdentifierNode) Copy() Node {
+ return NewIdentifier(i.Ident).SetTree(i.tr).SetPos(i.Pos)
+}
+
+// VariableNode holds a list of variable names, possibly with chained field
+// accesses. The dollar sign is part of the (first) name.
+type VariableNode struct {
+ NodeType
+ Pos
+ tr *Tree
+ Ident []string // Variable name and fields in lexical order.
+}
+
+func (t *Tree) newVariable(pos Pos, ident string) *VariableNode {
+ return &VariableNode{tr: t, NodeType: NodeVariable, Pos: pos, Ident: strings.Split(ident, ".")}
+}
+
+func (v *VariableNode) String() string {
+ var sb strings.Builder
+ v.writeTo(&sb)
+ return sb.String()
+}
+
+func (v *VariableNode) writeTo(sb *strings.Builder) {
+ for i, id := range v.Ident {
+ if i > 0 {
+ sb.WriteByte('.')
+ }
+ sb.WriteString(id)
+ }
+}
+
+func (v *VariableNode) tree() *Tree {
+ return v.tr
+}
+
+func (v *VariableNode) Copy() Node {
+ return &VariableNode{tr: v.tr, NodeType: NodeVariable, Pos: v.Pos, Ident: append([]string{}, v.Ident...)}
+}
+
+// DotNode holds the special identifier '.'.
+type DotNode struct {
+ NodeType
+ Pos
+ tr *Tree
+}
+
+func (t *Tree) newDot(pos Pos) *DotNode {
+ return &DotNode{tr: t, NodeType: NodeDot, Pos: pos}
+}
+
+func (d *DotNode) Type() NodeType {
+ // Override method on embedded NodeType for API compatibility.
+ // TODO: Not really a problem; could change API without effect but
+ // api tool complains.
+ return NodeDot
+}
+
+func (d *DotNode) String() string {
+ return "."
+}
+
+func (d *DotNode) writeTo(sb *strings.Builder) {
+ sb.WriteString(d.String())
+}
+
+func (d *DotNode) tree() *Tree {
+ return d.tr
+}
+
+func (d *DotNode) Copy() Node {
+ return d.tr.newDot(d.Pos)
+}
+
+// NilNode holds the special identifier 'nil' representing an untyped nil constant.
+type NilNode struct {
+ NodeType
+ Pos
+ tr *Tree
+}
+
+func (t *Tree) newNil(pos Pos) *NilNode {
+ return &NilNode{tr: t, NodeType: NodeNil, Pos: pos}
+}
+
+func (n *NilNode) Type() NodeType {
+ // Override method on embedded NodeType for API compatibility.
+ // TODO: Not really a problem; could change API without effect but
+ // api tool complains.
+ return NodeNil
+}
+
+func (n *NilNode) String() string {
+ return "nil"
+}
+
+func (n *NilNode) writeTo(sb *strings.Builder) {
+ sb.WriteString(n.String())
+}
+
+func (n *NilNode) tree() *Tree {
+ return n.tr
+}
+
+func (n *NilNode) Copy() Node {
+ return n.tr.newNil(n.Pos)
+}
+
+// FieldNode holds a field (identifier starting with '.').
+// The names may be chained ('.x.y').
+// The period is dropped from each ident.
+type FieldNode struct {
+ NodeType
+ Pos
+ tr *Tree
+ Ident []string // The identifiers in lexical order.
+}
+
+func (t *Tree) newField(pos Pos, ident string) *FieldNode {
+ return &FieldNode{tr: t, NodeType: NodeField, Pos: pos, Ident: strings.Split(ident[1:], ".")} // [1:] to drop leading period
+}
+
+func (f *FieldNode) String() string {
+ var sb strings.Builder
+ f.writeTo(&sb)
+ return sb.String()
+}
+
+func (f *FieldNode) writeTo(sb *strings.Builder) {
+ for _, id := range f.Ident {
+ sb.WriteByte('.')
+ sb.WriteString(id)
+ }
+}
+
+func (f *FieldNode) tree() *Tree {
+ return f.tr
+}
+
+func (f *FieldNode) Copy() Node {
+ return &FieldNode{tr: f.tr, NodeType: NodeField, Pos: f.Pos, Ident: append([]string{}, f.Ident...)}
+}
+
+// ChainNode holds a term followed by a chain of field accesses (identifier starting with '.').
+// The names may be chained ('.x.y').
+// The periods are dropped from each ident.
+type ChainNode struct {
+ NodeType
+ Pos
+ tr *Tree
+ Node Node
+ Field []string // The identifiers in lexical order.
+}
+
+func (t *Tree) newChain(pos Pos, node Node) *ChainNode {
+ return &ChainNode{tr: t, NodeType: NodeChain, Pos: pos, Node: node}
+}
+
+// Add adds the named field (which should start with a period) to the end of the chain.
+func (c *ChainNode) Add(field string) {
+ if len(field) == 0 || field[0] != '.' {
+ panic("no dot in field")
+ }
+ field = field[1:] // Remove leading dot.
+ if field == "" {
+ panic("empty field")
+ }
+ c.Field = append(c.Field, field)
+}
+
+func (c *ChainNode) String() string {
+ var sb strings.Builder
+ c.writeTo(&sb)
+ return sb.String()
+}
+
+func (c *ChainNode) writeTo(sb *strings.Builder) {
+ if _, ok := c.Node.(*PipeNode); ok {
+ sb.WriteByte('(')
+ c.Node.writeTo(sb)
+ sb.WriteByte(')')
+ } else {
+ c.Node.writeTo(sb)
+ }
+ for _, field := range c.Field {
+ sb.WriteByte('.')
+ sb.WriteString(field)
+ }
+}
+
+func (c *ChainNode) tree() *Tree {
+ return c.tr
+}
+
+func (c *ChainNode) Copy() Node {
+ return &ChainNode{tr: c.tr, NodeType: NodeChain, Pos: c.Pos, Node: c.Node, Field: append([]string{}, c.Field...)}
+}
+
+// BoolNode holds a boolean constant.
+type BoolNode struct {
+ NodeType
+ Pos
+ tr *Tree
+ True bool // The value of the boolean constant.
+}
+
+func (t *Tree) newBool(pos Pos, true bool) *BoolNode {
+ return &BoolNode{tr: t, NodeType: NodeBool, Pos: pos, True: true}
+}
+
+func (b *BoolNode) String() string {
+ if b.True {
+ return "true"
+ }
+ return "false"
+}
+
+func (b *BoolNode) writeTo(sb *strings.Builder) {
+ sb.WriteString(b.String())
+}
+
+func (b *BoolNode) tree() *Tree {
+ return b.tr
+}
+
+func (b *BoolNode) Copy() Node {
+ return b.tr.newBool(b.Pos, b.True)
+}
+
+// NumberNode holds a number: signed or unsigned integer, float, or complex.
+// The value is parsed and stored under all the types that can represent the value.
+// This simulates in a small amount of code the behavior of Go's ideal constants.
+type NumberNode struct {
+ NodeType
+ Pos
+ tr *Tree
+ IsInt bool // Number has an integral value.
+ IsUint bool // Number has an unsigned integral value.
+ IsFloat bool // Number has a floating-point value.
+ IsComplex bool // Number is complex.
+ Int64 int64 // The signed integer value.
+ Uint64 uint64 // The unsigned integer value.
+ Float64 float64 // The floating-point value.
+ Complex128 complex128 // The complex value.
+ Text string // The original textual representation from the input.
+}
+
+func (t *Tree) newNumber(pos Pos, text string, typ itemType) (*NumberNode, error) {
+ n := &NumberNode{tr: t, NodeType: NodeNumber, Pos: pos, Text: text}
+ switch typ {
+ case itemCharConstant:
+ rune, _, tail, err := strconv.UnquoteChar(text[1:], text[0])
+ if err != nil {
+ return nil, err
+ }
+ if tail != "'" {
+ return nil, fmt.Errorf("malformed character constant: %s", text)
+ }
+ n.Int64 = int64(rune)
+ n.IsInt = true
+ n.Uint64 = uint64(rune)
+ n.IsUint = true
+ n.Float64 = float64(rune) // odd but those are the rules.
+ n.IsFloat = true
+ return n, nil
+ case itemComplex:
+ // fmt.Sscan can parse the pair, so let it do the work.
+ if _, err := fmt.Sscan(text, &n.Complex128); err != nil {
+ return nil, err
+ }
+ n.IsComplex = true
+ n.simplifyComplex()
+ return n, nil
+ }
+ // Imaginary constants can only be complex unless they are zero.
+ if len(text) > 0 && text[len(text)-1] == 'i' {
+ f, err := strconv.ParseFloat(text[:len(text)-1], 64)
+ if err == nil {
+ n.IsComplex = true
+ n.Complex128 = complex(0, f)
+ n.simplifyComplex()
+ return n, nil
+ }
+ }
+ // Do integer test first so we get 0x123 etc.
+ u, err := strconv.ParseUint(text, 0, 64) // will fail for -0; fixed below.
+ if err == nil {
+ n.IsUint = true
+ n.Uint64 = u
+ }
+ i, err := strconv.ParseInt(text, 0, 64)
+ if err == nil {
+ n.IsInt = true
+ n.Int64 = i
+ if i == 0 {
+ n.IsUint = true // in case of -0.
+ n.Uint64 = u
+ }
+ }
+ // If an integer extraction succeeded, promote the float.
+ if n.IsInt {
+ n.IsFloat = true
+ n.Float64 = float64(n.Int64)
+ } else if n.IsUint {
+ n.IsFloat = true
+ n.Float64 = float64(n.Uint64)
+ } else {
+ f, err := strconv.ParseFloat(text, 64)
+ if err == nil {
+ // If we parsed it as a float but it looks like an integer,
+ // it's a huge number too large to fit in an int. Reject it.
+ if !strings.ContainsAny(text, ".eEpP") {
+ return nil, fmt.Errorf("integer overflow: %q", text)
+ }
+ n.IsFloat = true
+ n.Float64 = f
+ // If a floating-point extraction succeeded, extract the int if needed.
+ if !n.IsInt && float64(int64(f)) == f {
+ n.IsInt = true
+ n.Int64 = int64(f)
+ }
+ if !n.IsUint && float64(uint64(f)) == f {
+ n.IsUint = true
+ n.Uint64 = uint64(f)
+ }
+ }
+ }
+ if !n.IsInt && !n.IsUint && !n.IsFloat {
+ return nil, fmt.Errorf("illegal number syntax: %q", text)
+ }
+ return n, nil
+}
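A worked illustration of the rule that a number is stored under every type able to represent it; this is a hypothetical in-package test, not part of the upstream diff, and it only restates what the code above already does:

// Would live in a _test.go file of package parse (imports "testing").
func TestNumberNodeSketch(t *testing.T) {
	tr := New("sketch")
	n, err := tr.newNumber(0, "16", itemNumber)
	if err != nil {
		t.Fatal(err)
	}
	// "16" is representable as int, uint, and float.
	if !n.IsInt || !n.IsUint || !n.IsFloat || n.Int64 != 16 || n.Uint64 != 16 || n.Float64 != 16 {
		t.Errorf("unexpected result: %+v", n)
	}
	// "-1" cannot be unsigned, but is still promoted to float.
	n, _ = tr.newNumber(0, "-1", itemNumber)
	if n.IsUint || !n.IsInt || !n.IsFloat {
		t.Errorf("unexpected result: %+v", n)
	}
}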
+
+// simplifyComplex pulls out any other types that are represented by the complex number.
+// These all require that the imaginary part be zero.
+func (n *NumberNode) simplifyComplex() {
+ n.IsFloat = imag(n.Complex128) == 0
+ if n.IsFloat {
+ n.Float64 = real(n.Complex128)
+ n.IsInt = float64(int64(n.Float64)) == n.Float64
+ if n.IsInt {
+ n.Int64 = int64(n.Float64)
+ }
+ n.IsUint = float64(uint64(n.Float64)) == n.Float64
+ if n.IsUint {
+ n.Uint64 = uint64(n.Float64)
+ }
+ }
+}
+
+func (n *NumberNode) String() string {
+ return n.Text
+}
+
+func (n *NumberNode) writeTo(sb *strings.Builder) {
+ sb.WriteString(n.String())
+}
+
+func (n *NumberNode) tree() *Tree {
+ return n.tr
+}
+
+func (n *NumberNode) Copy() Node {
+ nn := new(NumberNode)
+ *nn = *n // Easy, fast, correct.
+ return nn
+}
+
+// StringNode holds a string constant. The value has been "unquoted".
+type StringNode struct {
+ NodeType
+ Pos
+ tr *Tree
+ Quoted string // The original text of the string, with quotes.
+ Text string // The string, after quote processing.
+}
+
+func (t *Tree) newString(pos Pos, orig, text string) *StringNode {
+ return &StringNode{tr: t, NodeType: NodeString, Pos: pos, Quoted: orig, Text: text}
+}
+
+func (s *StringNode) String() string {
+ return s.Quoted
+}
+
+func (s *StringNode) writeTo(sb *strings.Builder) {
+ sb.WriteString(s.String())
+}
+
+func (s *StringNode) tree() *Tree {
+ return s.tr
+}
+
+func (s *StringNode) Copy() Node {
+ return s.tr.newString(s.Pos, s.Quoted, s.Text)
+}
+
+// endNode represents an {{end}} action.
+// It does not appear in the final parse tree.
+type endNode struct {
+ NodeType
+ Pos
+ tr *Tree
+}
+
+func (t *Tree) newEnd(pos Pos) *endNode {
+ return &endNode{tr: t, NodeType: nodeEnd, Pos: pos}
+}
+
+func (e *endNode) String() string {
+ return "{{end}}"
+}
+
+func (e *endNode) writeTo(sb *strings.Builder) {
+ sb.WriteString(e.String())
+}
+
+func (e *endNode) tree() *Tree {
+ return e.tr
+}
+
+func (e *endNode) Copy() Node {
+ return e.tr.newEnd(e.Pos)
+}
+
+// elseNode represents an {{else}} action. Does not appear in the final tree.
+type elseNode struct {
+ NodeType
+ Pos
+ tr *Tree
+ Line int // The line number in the input. Deprecated: Kept for compatibility.
+}
+
+func (t *Tree) newElse(pos Pos, line int) *elseNode {
+ return &elseNode{tr: t, NodeType: nodeElse, Pos: pos, Line: line}
+}
+
+func (e *elseNode) Type() NodeType {
+ return nodeElse
+}
+
+func (e *elseNode) String() string {
+ return "{{else}}"
+}
+
+func (e *elseNode) writeTo(sb *strings.Builder) {
+ sb.WriteString(e.String())
+}
+
+func (e *elseNode) tree() *Tree {
+ return e.tr
+}
+
+func (e *elseNode) Copy() Node {
+ return e.tr.newElse(e.Pos, e.Line)
+}
+
+// BranchNode is the common representation of if, range, and with.
+type BranchNode struct {
+ NodeType
+ Pos
+ tr *Tree
+ Line int // The line number in the input. Deprecated: Kept for compatibility.
+ Pipe *PipeNode // The pipeline to be evaluated.
+ List *ListNode // What to execute if the value is non-empty.
+ ElseList *ListNode // What to execute if the value is empty (nil if absent).
+}
+
+func (b *BranchNode) String() string {
+ var sb strings.Builder
+ b.writeTo(&sb)
+ return sb.String()
+}
+
+func (b *BranchNode) writeTo(sb *strings.Builder) {
+ name := ""
+ switch b.NodeType {
+ case NodeIf:
+ name = "if"
+ case NodeRange:
+ name = "range"
+ case NodeWith:
+ name = "with"
+ default:
+ panic("unknown branch type")
+ }
+ sb.WriteString("{{")
+ sb.WriteString(name)
+ sb.WriteByte(' ')
+ b.Pipe.writeTo(sb)
+ sb.WriteString("}}")
+ b.List.writeTo(sb)
+ if b.ElseList != nil {
+ sb.WriteString("{{else}}")
+ b.ElseList.writeTo(sb)
+ }
+ sb.WriteString("{{end}}")
+}
+
+func (b *BranchNode) tree() *Tree {
+ return b.tr
+}
+
+func (b *BranchNode) Copy() Node {
+ switch b.NodeType {
+ case NodeIf:
+ return b.tr.newIf(b.Pos, b.Line, b.Pipe, b.List, b.ElseList)
+ case NodeRange:
+ return b.tr.newRange(b.Pos, b.Line, b.Pipe, b.List, b.ElseList)
+ case NodeWith:
+ return b.tr.newWith(b.Pos, b.Line, b.Pipe, b.List, b.ElseList)
+ default:
+ panic("unknown branch type")
+ }
+}
+
+// IfNode represents an {{if}} action and its commands.
+type IfNode struct {
+ BranchNode
+}
+
+func (t *Tree) newIf(pos Pos, line int, pipe *PipeNode, list, elseList *ListNode) *IfNode {
+ return &IfNode{BranchNode{tr: t, NodeType: NodeIf, Pos: pos, Line: line, Pipe: pipe, List: list, ElseList: elseList}}
+}
+
+func (i *IfNode) Copy() Node {
+ return i.tr.newIf(i.Pos, i.Line, i.Pipe.CopyPipe(), i.List.CopyList(), i.ElseList.CopyList())
+}
+
+// RangeNode represents a {{range}} action and its commands.
+type RangeNode struct {
+ BranchNode
+}
+
+func (t *Tree) newRange(pos Pos, line int, pipe *PipeNode, list, elseList *ListNode) *RangeNode {
+ return &RangeNode{BranchNode{tr: t, NodeType: NodeRange, Pos: pos, Line: line, Pipe: pipe, List: list, ElseList: elseList}}
+}
+
+func (r *RangeNode) Copy() Node {
+ return r.tr.newRange(r.Pos, r.Line, r.Pipe.CopyPipe(), r.List.CopyList(), r.ElseList.CopyList())
+}
+
+// WithNode represents a {{with}} action and its commands.
+type WithNode struct {
+ BranchNode
+}
+
+func (t *Tree) newWith(pos Pos, line int, pipe *PipeNode, list, elseList *ListNode) *WithNode {
+ return &WithNode{BranchNode{tr: t, NodeType: NodeWith, Pos: pos, Line: line, Pipe: pipe, List: list, ElseList: elseList}}
+}
+
+func (w *WithNode) Copy() Node {
+ return w.tr.newWith(w.Pos, w.Line, w.Pipe.CopyPipe(), w.List.CopyList(), w.ElseList.CopyList())
+}
+
+// TemplateNode represents a {{template}} action.
+type TemplateNode struct {
+ NodeType
+ Pos
+ tr *Tree
+ Line int // The line number in the input. Deprecated: Kept for compatibility.
+ Name string // The name of the template (unquoted).
+ Pipe *PipeNode // The command to evaluate as dot for the template.
+}
+
+func (t *Tree) newTemplate(pos Pos, line int, name string, pipe *PipeNode) *TemplateNode {
+ return &TemplateNode{tr: t, NodeType: NodeTemplate, Pos: pos, Line: line, Name: name, Pipe: pipe}
+}
+
+func (t *TemplateNode) String() string {
+ var sb strings.Builder
+ t.writeTo(&sb)
+ return sb.String()
+}
+
+func (t *TemplateNode) writeTo(sb *strings.Builder) {
+ sb.WriteString("{{template ")
+ sb.WriteString(strconv.Quote(t.Name))
+ if t.Pipe != nil {
+ sb.WriteByte(' ')
+ t.Pipe.writeTo(sb)
+ }
+ sb.WriteString("}}")
+}
+
+func (t *TemplateNode) tree() *Tree {
+ return t.tr
+}
+
+func (t *TemplateNode) Copy() Node {
+ return t.tr.newTemplate(t.Pos, t.Line, t.Name, t.Pipe.CopyPipe())
+}
diff --git a/src/text/template/parse/parse.go b/src/text/template/parse/parse.go
new file mode 100644
index 0000000..5e6e512
--- /dev/null
+++ b/src/text/template/parse/parse.go
@@ -0,0 +1,754 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package parse builds parse trees for templates as defined by text/template
+// and html/template. Clients should use those packages to construct templates
+// rather than this one, which provides shared internal data structures not
+// intended for general use.
+package parse
+
+import (
+ "bytes"
+ "fmt"
+ "runtime"
+ "strconv"
+ "strings"
+)
+
+// Tree is the representation of a single parsed template.
+type Tree struct {
+ Name string // name of the template represented by the tree.
+ ParseName string // name of the top-level template during parsing, for error messages.
+ Root *ListNode // top-level root of the tree.
+ Mode Mode // parsing mode.
+ text string // text parsed to create the template (or its parent)
+ // Parsing only; cleared after parse.
+ funcs []map[string]interface{}
+ lex *lexer
+ token [3]item // three-token lookahead for parser.
+ peekCount int
+ vars []string // variables defined at the moment.
+ treeSet map[string]*Tree
+ actionLine int // line of left delim starting action
+ mode Mode
+}
+
+// A mode value is a set of flags (or 0). Modes control parser behavior.
+type Mode uint
+
+const (
+ ParseComments Mode = 1 << iota // parse comments and add them to AST
+)
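+
+// For example, a client that wants comments kept in the tree sets the mode
+// before parsing, as the package tests do:
+//
+//	tr := New("page")
+//	tr.Mode = ParseComments
+//	tree, err := tr.Parse("{{/* note */}}", "", "", make(map[string]*Tree))
+//
+// Without ParseComments, comment text is dropped from the resulting tree.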
+
+// Copy returns a copy of the Tree. Any parsing state is discarded.
+func (t *Tree) Copy() *Tree {
+ if t == nil {
+ return nil
+ }
+ return &Tree{
+ Name: t.Name,
+ ParseName: t.ParseName,
+ Root: t.Root.CopyList(),
+ text: t.text,
+ }
+}
+
+// Parse returns a map from template name to parse.Tree, created by parsing the
+// templates described in the argument string. The top-level template will be
+// given the specified name. If an error is encountered, parsing stops and an
+// empty map is returned with the error.
+func Parse(name, text, leftDelim, rightDelim string, funcs ...map[string]interface{}) (map[string]*Tree, error) {
+ treeSet := make(map[string]*Tree)
+ t := New(name)
+ t.text = text
+ _, err := t.Parse(text, leftDelim, rightDelim, treeSet, funcs...)
+ return treeSet, err
+}
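+
+// A minimal sketch of using the package-level entry point with the default
+// delimiters; the returned map holds one tree per template defined in text:
+//
+//	trees, err := Parse("page", "Hello, {{.Name}}", "", "")
+//	if err != nil {
+//		// handle the parse error
+//	}
+//	root := trees["page"].Root // a ListNode holding a TextNode and an ActionNode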
+
+// next returns the next token.
+func (t *Tree) next() item {
+ if t.peekCount > 0 {
+ t.peekCount--
+ } else {
+ t.token[0] = t.lex.nextItem()
+ }
+ return t.token[t.peekCount]
+}
+
+// backup backs the input stream up one token.
+func (t *Tree) backup() {
+ t.peekCount++
+}
+
+// backup2 backs the input stream up two tokens.
+// The zeroth token is already there.
+func (t *Tree) backup2(t1 item) {
+ t.token[1] = t1
+ t.peekCount = 2
+}
+
+// backup3 backs the input stream up three tokens.
+// The zeroth token is already there.
+func (t *Tree) backup3(t2, t1 item) { // Reverse order: we're pushing back.
+ t.token[1] = t1
+ t.token[2] = t2
+ t.peekCount = 3
+}
+
+// peek returns but does not consume the next token.
+func (t *Tree) peek() item {
+ if t.peekCount > 0 {
+ return t.token[t.peekCount-1]
+ }
+ t.peekCount = 1
+ t.token[0] = t.lex.nextItem()
+ return t.token[0]
+}
+
+// nextNonSpace returns the next non-space token.
+func (t *Tree) nextNonSpace() (token item) {
+ for {
+ token = t.next()
+ if token.typ != itemSpace {
+ break
+ }
+ }
+ return token
+}
+
+// peekNonSpace returns but does not consume the next non-space token.
+func (t *Tree) peekNonSpace() item {
+ token := t.nextNonSpace()
+ t.backup()
+ return token
+}
+
+// Parsing.
+
+// New allocates a new parse tree with the given name.
+func New(name string, funcs ...map[string]interface{}) *Tree {
+ return &Tree{
+ Name: name,
+ funcs: funcs,
+ }
+}
+
+// ErrorContext returns a textual representation of the location of the node in the input text.
+// The receiver is only used when the node does not have a pointer to the tree inside,
+// which can occur in old code.
+func (t *Tree) ErrorContext(n Node) (location, context string) {
+ pos := int(n.Position())
+ tree := n.tree()
+ if tree == nil {
+ tree = t
+ }
+ text := tree.text[:pos]
+ byteNum := strings.LastIndex(text, "\n")
+ if byteNum == -1 {
+ byteNum = pos // On first line.
+ } else {
+ byteNum++ // After the newline.
+ byteNum = pos - byteNum
+ }
+ lineNum := 1 + strings.Count(text, "\n")
+ context = n.String()
+ return fmt.Sprintf("%s:%d:%d", tree.ParseName, lineNum, byteNum), context
+}
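+
+// The location has the form "ParseName:line:byte", where byte is the 0-based
+// byte offset of the node within its line; for example, a node starting at
+// the beginning of the third input line yields a location like "page:3:0".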
+
+// errorf formats the error and terminates processing.
+func (t *Tree) errorf(format string, args ...interface{}) {
+ t.Root = nil
+ format = fmt.Sprintf("template: %s:%d: %s", t.ParseName, t.token[0].line, format)
+ panic(fmt.Errorf(format, args...))
+}
+
+// error terminates processing.
+func (t *Tree) error(err error) {
+ t.errorf("%s", err)
+}
+
+// expect consumes the next token and guarantees it has the required type.
+func (t *Tree) expect(expected itemType, context string) item {
+ token := t.nextNonSpace()
+ if token.typ != expected {
+ t.unexpected(token, context)
+ }
+ return token
+}
+
+// expectOneOf consumes the next token and guarantees it has one of the required types.
+func (t *Tree) expectOneOf(expected1, expected2 itemType, context string) item {
+ token := t.nextNonSpace()
+ if token.typ != expected1 && token.typ != expected2 {
+ t.unexpected(token, context)
+ }
+ return token
+}
+
+// unexpected complains about the token and terminates processing.
+func (t *Tree) unexpected(token item, context string) {
+ if token.typ == itemError {
+ extra := ""
+ if t.actionLine != 0 && t.actionLine != token.line {
+ extra = fmt.Sprintf(" in action started at %s:%d", t.ParseName, t.actionLine)
+ if strings.HasSuffix(token.val, " action") {
+ extra = extra[len(" in action"):] // avoid "action in action"
+ }
+ }
+ t.errorf("%s%s", token, extra)
+ }
+ t.errorf("unexpected %s in %s", token, context)
+}
+
+// recover is the handler that turns panics into returns from the top level of Parse.
+func (t *Tree) recover(errp *error) {
+ e := recover()
+ if e != nil {
+ if _, ok := e.(runtime.Error); ok {
+ panic(e)
+ }
+ if t != nil {
+ t.lex.drain()
+ t.stopParse()
+ }
+ *errp = e.(error)
+ }
+}
+
+// startParse initializes the parser, using the lexer.
+func (t *Tree) startParse(funcs []map[string]interface{}, lex *lexer, treeSet map[string]*Tree) {
+ t.Root = nil
+ t.lex = lex
+ t.vars = []string{"$"}
+ t.funcs = funcs
+ t.treeSet = treeSet
+}
+
+// stopParse terminates parsing.
+func (t *Tree) stopParse() {
+ t.lex = nil
+ t.vars = nil
+ t.funcs = nil
+ t.treeSet = nil
+}
+
+// Parse parses the template definition string to construct a representation of
+// the template for execution. If either action delimiter string is empty, the
+// default ("{{" or "}}") is used. Embedded template definitions are added to
+// the treeSet map.
+func (t *Tree) Parse(text, leftDelim, rightDelim string, treeSet map[string]*Tree, funcs ...map[string]interface{}) (tree *Tree, err error) {
+ defer t.recover(&err)
+ t.ParseName = t.Name
+ emitComment := t.Mode&ParseComments != 0
+ t.startParse(funcs, lex(t.Name, text, leftDelim, rightDelim, emitComment), treeSet)
+ t.text = text
+ t.parse()
+ t.add()
+ t.stopParse()
+ return t, nil
+}
+
+// add adds tree to t.treeSet.
+func (t *Tree) add() {
+ tree := t.treeSet[t.Name]
+ if tree == nil || IsEmptyTree(tree.Root) {
+ t.treeSet[t.Name] = t
+ return
+ }
+ if !IsEmptyTree(t.Root) {
+ t.errorf("template: multiple definition of template %q", t.Name)
+ }
+}
+
+// IsEmptyTree reports whether this tree (node) is empty of everything but space or comments.
+func IsEmptyTree(n Node) bool {
+ switch n := n.(type) {
+ case nil:
+ return true
+ case *ActionNode:
+ case *CommentNode:
+ return true
+ case *IfNode:
+ case *ListNode:
+ for _, node := range n.Nodes {
+ if !IsEmptyTree(node) {
+ return false
+ }
+ }
+ return true
+ case *RangeNode:
+ case *TemplateNode:
+ case *TextNode:
+ return len(bytes.TrimSpace(n.Text)) == 0
+ case *WithNode:
+ default:
+ panic("unknown node: " + n.String())
+ }
+ return false
+}
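+
+// For example, input consisting only of spaces, comments, and {{define}}
+// blocks yields an empty top-level tree; any other text or action makes it
+// non-empty (the isEmptyTests table in parse_test.go exercises these cases).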
+
+// parse is the top-level parser for a template, essentially the same
+// as itemList except it also parses {{define}} actions.
+// It runs to EOF.
+func (t *Tree) parse() {
+ t.Root = t.newList(t.peek().pos)
+ for t.peek().typ != itemEOF {
+ if t.peek().typ == itemLeftDelim {
+ delim := t.next()
+ if t.nextNonSpace().typ == itemDefine {
+ newT := New("definition") // name will be updated once we know it.
+ newT.text = t.text
+ newT.Mode = t.Mode
+ newT.ParseName = t.ParseName
+ newT.startParse(t.funcs, t.lex, t.treeSet)
+ newT.parseDefinition()
+ continue
+ }
+ t.backup2(delim)
+ }
+ switch n := t.textOrAction(); n.Type() {
+ case nodeEnd, nodeElse:
+ t.errorf("unexpected %s", n)
+ default:
+ t.Root.append(n)
+ }
+ }
+}
+
+// parseDefinition parses a {{define}} ... {{end}} template definition and
+// installs the definition in t.treeSet. The "define" keyword has already
+// been scanned.
+func (t *Tree) parseDefinition() {
+ const context = "define clause"
+ name := t.expectOneOf(itemString, itemRawString, context)
+ var err error
+ t.Name, err = strconv.Unquote(name.val)
+ if err != nil {
+ t.error(err)
+ }
+ t.expect(itemRightDelim, context)
+ var end Node
+ t.Root, end = t.itemList()
+ if end.Type() != nodeEnd {
+ t.errorf("unexpected %s in %s", end, context)
+ }
+ t.add()
+ t.stopParse()
+}
+
+// itemList:
+// textOrAction*
+// Terminates at {{end}} or {{else}}, returned separately.
+func (t *Tree) itemList() (list *ListNode, next Node) {
+ list = t.newList(t.peekNonSpace().pos)
+ for t.peekNonSpace().typ != itemEOF {
+ n := t.textOrAction()
+ switch n.Type() {
+ case nodeEnd, nodeElse:
+ return list, n
+ }
+ list.append(n)
+ }
+ t.errorf("unexpected EOF")
+ return
+}
+
+// textOrAction:
+// text | comment | action
+func (t *Tree) textOrAction() Node {
+ switch token := t.nextNonSpace(); token.typ {
+ case itemText:
+ return t.newText(token.pos, token.val)
+ case itemLeftDelim:
+ t.actionLine = token.line
+ defer t.clearActionLine()
+ return t.action()
+ case itemComment:
+ return t.newComment(token.pos, token.val)
+ default:
+ t.unexpected(token, "input")
+ }
+ return nil
+}
+
+func (t *Tree) clearActionLine() {
+ t.actionLine = 0
+}
+
+// Action:
+// control
+// command ("|" command)*
+// Left delim is past. Now get actions.
+// First word could be a keyword such as range.
+func (t *Tree) action() (n Node) {
+ switch token := t.nextNonSpace(); token.typ {
+ case itemBlock:
+ return t.blockControl()
+ case itemElse:
+ return t.elseControl()
+ case itemEnd:
+ return t.endControl()
+ case itemIf:
+ return t.ifControl()
+ case itemRange:
+ return t.rangeControl()
+ case itemTemplate:
+ return t.templateControl()
+ case itemWith:
+ return t.withControl()
+ }
+ t.backup()
+ token := t.peek()
+ // Do not pop variables; they persist until "end".
+ return t.newAction(token.pos, token.line, t.pipeline("command", itemRightDelim))
+}
+
+// Pipeline:
+// declarations? command ('|' command)*
+func (t *Tree) pipeline(context string, end itemType) (pipe *PipeNode) {
+ token := t.peekNonSpace()
+ pipe = t.newPipeline(token.pos, token.line, nil)
+ // Are there declarations or assignments?
+decls:
+ if v := t.peekNonSpace(); v.typ == itemVariable {
+ t.next()
+ // Since space is a token, we need 3-token look-ahead here in the worst case:
+ // in "$x foo" we need to read "foo" (as opposed to ":=") to know that $x is an
+ // argument variable rather than a declaration. So remember the token
+ // adjacent to the variable so we can push it back if necessary.
+ tokenAfterVariable := t.peek()
+ next := t.peekNonSpace()
+ switch {
+ case next.typ == itemAssign, next.typ == itemDeclare:
+ pipe.IsAssign = next.typ == itemAssign
+ t.nextNonSpace()
+ pipe.Decl = append(pipe.Decl, t.newVariable(v.pos, v.val))
+ t.vars = append(t.vars, v.val)
+ case next.typ == itemChar && next.val == ",":
+ t.nextNonSpace()
+ pipe.Decl = append(pipe.Decl, t.newVariable(v.pos, v.val))
+ t.vars = append(t.vars, v.val)
+ if context == "range" && len(pipe.Decl) < 2 {
+ switch t.peekNonSpace().typ {
+ case itemVariable, itemRightDelim, itemRightParen:
+ // second initialized variable in a range pipeline
+ goto decls
+ default:
+ t.errorf("range can only initialize variables")
+ }
+ }
+ t.errorf("too many declarations in %s", context)
+ case tokenAfterVariable.typ == itemSpace:
+ t.backup3(v, tokenAfterVariable)
+ default:
+ t.backup2(v)
+ }
+ }
+ for {
+ switch token := t.nextNonSpace(); token.typ {
+ case end:
+ // At this point, the pipeline is complete
+ t.checkPipeline(pipe, context)
+ return
+ case itemBool, itemCharConstant, itemComplex, itemDot, itemField, itemIdentifier,
+ itemNumber, itemNil, itemRawString, itemString, itemVariable, itemLeftParen:
+ t.backup()
+ pipe.append(t.command())
+ default:
+ t.unexpected(token, context)
+ }
+ }
+}
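+
+// For example, "{{$x := .X | .Y}}" yields a pipeline with one declared
+// variable ($x) and two commands (.X and .Y), and a range pipeline such as
+// "{{range $i, $v := .SI}}" is the only place two variables may be
+// initialized (see the "pipeline with decl" and "range 2 vars" parse tests).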
+
+func (t *Tree) checkPipeline(pipe *PipeNode, context string) {
+ // Reject empty pipelines
+ if len(pipe.Cmds) == 0 {
+ t.errorf("missing value for %s", context)
+ }
+ // Only the first command of a pipeline can start with a non executable operand
+ for i, c := range pipe.Cmds[1:] {
+ switch c.Args[0].Type() {
+ case NodeBool, NodeDot, NodeNil, NodeNumber, NodeString:
+ // With A|B|C, pipeline stage 2 is B
+ t.errorf("non executable command in pipeline stage %d", i+2)
+ }
+ }
+}
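+
+// For example, "{{12 | printf}}" passes this check (the literal appears only
+// in stage 1), while "{{.|12|printf}}" fails with "non executable command in
+// pipeline stage 2" (see the wrong-pipeline parse tests).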
+
+func (t *Tree) parseControl(allowElseIf bool, context string) (pos Pos, line int, pipe *PipeNode, list, elseList *ListNode) {
+ defer t.popVars(len(t.vars))
+ pipe = t.pipeline(context, itemRightDelim)
+ var next Node
+ list, next = t.itemList()
+ switch next.Type() {
+ case nodeEnd: //done
+ case nodeElse:
+ if allowElseIf {
+ // Special case for "else if". If the "else" is followed immediately by an "if",
+ // the elseControl will have left the "if" token pending. Treat
+ // {{if a}}_{{else if b}}_{{end}}
+ // as
+ // {{if a}}_{{else}}{{if b}}_{{end}}{{end}}.
+ // To do this, parse the if as usual and stop at its {{end}}; the subsequent {{end}}
+ // is assumed. This technique works even for long if-else-if chains.
+ // TODO: Should we allow else-if in with and range?
+ if t.peek().typ == itemIf {
+ t.next() // Consume the "if" token.
+ elseList = t.newList(next.Position())
+ elseList.append(t.ifControl())
+ // Do not consume the next item - only one {{end}} required.
+ break
+ }
+ }
+ elseList, next = t.itemList()
+ if next.Type() != nodeEnd {
+ t.errorf("expected end; found %s", next)
+ }
+ }
+ return pipe.Position(), pipe.Line, pipe, list, elseList
+}
+
+// If:
+// {{if pipeline}} itemList {{end}}
+// {{if pipeline}} itemList {{else}} itemList {{end}}
+// If keyword is past.
+func (t *Tree) ifControl() Node {
+ return t.newIf(t.parseControl(true, "if"))
+}
+
+// Range:
+// {{range pipeline}} itemList {{end}}
+// {{range pipeline}} itemList {{else}} itemList {{end}}
+// Range keyword is past.
+func (t *Tree) rangeControl() Node {
+ return t.newRange(t.parseControl(false, "range"))
+}
+
+// With:
+// {{with pipeline}} itemList {{end}}
+// {{with pipeline}} itemList {{else}} itemList {{end}}
+// With keyword is past.
+func (t *Tree) withControl() Node {
+ return t.newWith(t.parseControl(false, "with"))
+}
+
+// End:
+// {{end}}
+// End keyword is past.
+func (t *Tree) endControl() Node {
+ return t.newEnd(t.expect(itemRightDelim, "end").pos)
+}
+
+// Else:
+// {{else}}
+// Else keyword is past.
+func (t *Tree) elseControl() Node {
+ // Special case for "else if".
+ peek := t.peekNonSpace()
+ if peek.typ == itemIf {
+ // We see "{{else if ..." but in effect rewrite it to "{{else}}{{if ...".
+ return t.newElse(peek.pos, peek.line)
+ }
+ token := t.expect(itemRightDelim, "else")
+ return t.newElse(token.pos, token.line)
+}
+
+// Block:
+// {{block stringValue pipeline}}
+// Block keyword is past.
+// The name must be something that can evaluate to a string.
+// The pipeline is mandatory.
+func (t *Tree) blockControl() Node {
+ const context = "block clause"
+
+ token := t.nextNonSpace()
+ name := t.parseTemplateName(token, context)
+ pipe := t.pipeline(context, itemRightDelim)
+
+ block := New(name) // The block's body is parsed into its own tree under this name.
+ block.text = t.text
+ block.Mode = t.Mode
+ block.ParseName = t.ParseName
+ block.startParse(t.funcs, t.lex, t.treeSet)
+ var end Node
+ block.Root, end = block.itemList()
+ if end.Type() != nodeEnd {
+ t.errorf("unexpected %s in %s", end, context)
+ }
+ block.add()
+ block.stopParse()
+
+ return t.newTemplate(token.pos, token.line, name, pipe)
+}
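+
+// For example, `{{block "inner" .}}bar{{.}}baz{{end}}` behaves like
+// `{{define "inner"}}bar{{.}}baz{{end}}` followed by `{{template "inner" .}}`:
+// the body is installed in treeSet under "inner" and the enclosing tree keeps
+// only the resulting TemplateNode (see TestBlock in parse_test.go).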
+
+// Template:
+// {{template stringValue pipeline}}
+// Template keyword is past. The name must be something that can evaluate
+// to a string.
+func (t *Tree) templateControl() Node {
+ const context = "template clause"
+ token := t.nextNonSpace()
+ name := t.parseTemplateName(token, context)
+ var pipe *PipeNode
+ if t.nextNonSpace().typ != itemRightDelim {
+ t.backup()
+ // Do not pop variables; they persist until "end".
+ pipe = t.pipeline(context, itemRightDelim)
+ }
+ return t.newTemplate(token.pos, token.line, name, pipe)
+}
+
+func (t *Tree) parseTemplateName(token item, context string) (name string) {
+ switch token.typ {
+ case itemString, itemRawString:
+ s, err := strconv.Unquote(token.val)
+ if err != nil {
+ t.error(err)
+ }
+ name = s
+ default:
+ t.unexpected(token, context)
+ }
+ return
+}
+
+// command:
+// operand (space operand)*
+// Space-separated arguments up to a pipeline character or right delimiter.
+// We consume the pipe character but leave the right delim to terminate the action.
+func (t *Tree) command() *CommandNode {
+ cmd := t.newCommand(t.peekNonSpace().pos)
+ for {
+ t.peekNonSpace() // skip leading spaces.
+ operand := t.operand()
+ if operand != nil {
+ cmd.append(operand)
+ }
+ switch token := t.next(); token.typ {
+ case itemSpace:
+ continue
+ case itemRightDelim, itemRightParen:
+ t.backup()
+ case itemPipe:
+ // nothing here; break loop below
+ default:
+ t.unexpected(token, "operand")
+ }
+ break
+ }
+ if len(cmd.Args) == 0 {
+ t.errorf("empty command")
+ }
+ return cmd
+}
+
+// operand:
+// term .Field*
+// An operand is a space-separated component of a command,
+// a term possibly followed by field accesses.
+// A nil return means the next item is not an operand.
+func (t *Tree) operand() Node {
+ node := t.term()
+ if node == nil {
+ return nil
+ }
+ if t.peek().typ == itemField {
+ chain := t.newChain(t.peek().pos, node)
+ for t.peek().typ == itemField {
+ chain.Add(t.next().val)
+ }
+ // Compatibility with original API: If the term is of type NodeField
+ // or NodeVariable, just put more fields on the original.
+ // Otherwise, keep the Chain node.
+ // Obvious parsing errors involving literal values are detected here.
+ // More complex error cases will have to be handled at execution time.
+ switch node.Type() {
+ case NodeField:
+ node = t.newField(chain.Position(), chain.String())
+ case NodeVariable:
+ node = t.newVariable(chain.Position(), chain.String())
+ case NodeBool, NodeString, NodeNumber, NodeNil, NodeDot:
+ t.errorf("unexpected . after term %q", node.String())
+ default:
+ node = chain
+ }
+ }
+ return node
+}
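+
+// For example, "(.Y .Z).Field" keeps its ChainNode, ".X.Y.Z" is folded back
+// into a single FieldNode, "$.I" stays a VariableNode, and "true.E" is
+// rejected here with "unexpected . after term".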
+
+// term:
+// literal (number, string, nil, boolean)
+// function (identifier)
+// .
+// .Field
+// $
+// '(' pipeline ')'
+// A term is a simple "expression".
+// A nil return means the next item is not a term.
+func (t *Tree) term() Node {
+ switch token := t.nextNonSpace(); token.typ {
+ case itemIdentifier:
+ if !t.hasFunction(token.val) {
+ t.errorf("function %q not defined", token.val)
+ }
+ return NewIdentifier(token.val).SetTree(t).SetPos(token.pos)
+ case itemDot:
+ return t.newDot(token.pos)
+ case itemNil:
+ return t.newNil(token.pos)
+ case itemVariable:
+ return t.useVar(token.pos, token.val)
+ case itemField:
+ return t.newField(token.pos, token.val)
+ case itemBool:
+ return t.newBool(token.pos, token.val == "true")
+ case itemCharConstant, itemComplex, itemNumber:
+ number, err := t.newNumber(token.pos, token.val, token.typ)
+ if err != nil {
+ t.error(err)
+ }
+ return number
+ case itemLeftParen:
+ return t.pipeline("parenthesized pipeline", itemRightParen)
+ case itemString, itemRawString:
+ s, err := strconv.Unquote(token.val)
+ if err != nil {
+ t.error(err)
+ }
+ return t.newString(token.pos, token.val, s)
+ }
+ t.backup()
+ return nil
+}
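+
+// For example, in the parse test "{{range .SI 1 -3.2i true false 'a' nil}}"
+// every argument after .SI is a literal term: a number, a complex constant,
+// two booleans, a character constant, and the nil constant.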
+
+// hasFunction reports whether a function name exists in the Tree's maps.
+func (t *Tree) hasFunction(name string) bool {
+ for _, funcMap := range t.funcs {
+ if funcMap == nil {
+ continue
+ }
+ if funcMap[name] != nil {
+ return true
+ }
+ }
+ return false
+}
+
+// popVars trims the variable list to the specified length.
+func (t *Tree) popVars(n int) {
+ t.vars = t.vars[:n]
+}
+
+// useVar returns a node for a variable reference. It errors if the
+// variable is not defined.
+func (t *Tree) useVar(pos Pos, name string) Node {
+ v := t.newVariable(pos, name)
+ for _, varName := range t.vars {
+ if varName == v.Ident[0] {
+ return v
+ }
+ }
+ t.errorf("undefined variable %q", v.Ident[0])
+ return nil
+}
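+
+// Variable scope example: in "{{with $x := 4}}{{end}}{{$x}}" the declaration
+// of $x is popped when the with control ends, so the trailing "{{$x}}" fails
+// with "undefined variable" (see the "variable undefined after end" test).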
diff --git a/src/text/template/parse/parse_test.go b/src/text/template/parse/parse_test.go
new file mode 100644
index 0000000..220f984
--- /dev/null
+++ b/src/text/template/parse/parse_test.go
@@ -0,0 +1,652 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package parse
+
+import (
+ "flag"
+ "fmt"
+ "strings"
+ "testing"
+)
+
+var debug = flag.Bool("debug", false, "show the errors produced by the main tests")
+
+type numberTest struct {
+ text string
+ isInt bool
+ isUint bool
+ isFloat bool
+ isComplex bool
+ int64
+ uint64
+ float64
+ complex128
+}
+
+var numberTests = []numberTest{
+ // basics
+ {"0", true, true, true, false, 0, 0, 0, 0},
+ {"-0", true, true, true, false, 0, 0, 0, 0}, // check that -0 is a uint.
+ {"73", true, true, true, false, 73, 73, 73, 0},
+ {"7_3", true, true, true, false, 73, 73, 73, 0},
+ {"0b10_010_01", true, true, true, false, 73, 73, 73, 0},
+ {"0B10_010_01", true, true, true, false, 73, 73, 73, 0},
+ {"073", true, true, true, false, 073, 073, 073, 0},
+ {"0o73", true, true, true, false, 073, 073, 073, 0},
+ {"0O73", true, true, true, false, 073, 073, 073, 0},
+ {"0x73", true, true, true, false, 0x73, 0x73, 0x73, 0},
+ {"0X73", true, true, true, false, 0x73, 0x73, 0x73, 0},
+ {"0x7_3", true, true, true, false, 0x73, 0x73, 0x73, 0},
+ {"-73", true, false, true, false, -73, 0, -73, 0},
+ {"+73", true, false, true, false, 73, 0, 73, 0},
+ {"100", true, true, true, false, 100, 100, 100, 0},
+ {"1e9", true, true, true, false, 1e9, 1e9, 1e9, 0},
+ {"-1e9", true, false, true, false, -1e9, 0, -1e9, 0},
+ {"-1.2", false, false, true, false, 0, 0, -1.2, 0},
+ {"1e19", false, true, true, false, 0, 1e19, 1e19, 0},
+ {"1e1_9", false, true, true, false, 0, 1e19, 1e19, 0},
+ {"1E19", false, true, true, false, 0, 1e19, 1e19, 0},
+ {"-1e19", false, false, true, false, 0, 0, -1e19, 0},
+ {"0x_1p4", true, true, true, false, 16, 16, 16, 0},
+ {"0X_1P4", true, true, true, false, 16, 16, 16, 0},
+ {"0x_1p-4", false, false, true, false, 0, 0, 1 / 16., 0},
+ {"4i", false, false, false, true, 0, 0, 0, 4i},
+ {"-1.2+4.2i", false, false, false, true, 0, 0, 0, -1.2 + 4.2i},
+ {"073i", false, false, false, true, 0, 0, 0, 73i}, // not octal!
+ // complex constants with a zero imaginary part are also float (and maybe integer)
+ {"0i", true, true, true, true, 0, 0, 0, 0},
+ {"-1.2+0i", false, false, true, true, 0, 0, -1.2, -1.2},
+ {"-12+0i", true, false, true, true, -12, 0, -12, -12},
+ {"13+0i", true, true, true, true, 13, 13, 13, 13},
+ // funny bases
+ {"0123", true, true, true, false, 0123, 0123, 0123, 0},
+ {"-0x0", true, true, true, false, 0, 0, 0, 0},
+ {"0xdeadbeef", true, true, true, false, 0xdeadbeef, 0xdeadbeef, 0xdeadbeef, 0},
+ // character constants
+ {`'a'`, true, true, true, false, 'a', 'a', 'a', 0},
+ {`'\n'`, true, true, true, false, '\n', '\n', '\n', 0},
+ {`'\\'`, true, true, true, false, '\\', '\\', '\\', 0},
+ {`'\''`, true, true, true, false, '\'', '\'', '\'', 0},
+ {`'\xFF'`, true, true, true, false, 0xFF, 0xFF, 0xFF, 0},
+ {`'パ'`, true, true, true, false, 0x30d1, 0x30d1, 0x30d1, 0},
+ {`'\u30d1'`, true, true, true, false, 0x30d1, 0x30d1, 0x30d1, 0},
+ {`'\U000030d1'`, true, true, true, false, 0x30d1, 0x30d1, 0x30d1, 0},
+ // some broken syntax
+ {text: "+-2"},
+ {text: "0x123."},
+ {text: "1e."},
+ {text: "0xi."},
+ {text: "1+2."},
+ {text: "'x"},
+ {text: "'xx'"},
+ {text: "'433937734937734969526500969526500'"}, // Integer too large - issue 10634.
+ // Issue 8622 - 0xe parsed as floating point. Very embarrassing.
+ {"0xef", true, true, true, false, 0xef, 0xef, 0xef, 0},
+}
+
+func TestNumberParse(t *testing.T) {
+ for _, test := range numberTests {
+ // If fmt.Sscan thinks it's complex, it's complex. We can't trust the output
+ // because imaginary comes out as a number.
+ var c complex128
+ typ := itemNumber
+ var tree *Tree
+ if test.text[0] == '\'' {
+ typ = itemCharConstant
+ } else {
+ _, err := fmt.Sscan(test.text, &c)
+ if err == nil {
+ typ = itemComplex
+ }
+ }
+ n, err := tree.newNumber(0, test.text, typ)
+ ok := test.isInt || test.isUint || test.isFloat || test.isComplex
+ if ok && err != nil {
+ t.Errorf("unexpected error for %q: %s", test.text, err)
+ continue
+ }
+ if !ok && err == nil {
+ t.Errorf("expected error for %q", test.text)
+ continue
+ }
+ if !ok {
+ if *debug {
+ fmt.Printf("%s\n\t%s\n", test.text, err)
+ }
+ continue
+ }
+ if n.IsComplex != test.isComplex {
+ t.Errorf("complex incorrect for %q; should be %t", test.text, test.isComplex)
+ }
+ if test.isInt {
+ if !n.IsInt {
+ t.Errorf("expected integer for %q", test.text)
+ }
+ if n.Int64 != test.int64 {
+ t.Errorf("int64 for %q should be %d Is %d", test.text, test.int64, n.Int64)
+ }
+ } else if n.IsInt {
+ t.Errorf("did not expect integer for %q", test.text)
+ }
+ if test.isUint {
+ if !n.IsUint {
+ t.Errorf("expected unsigned integer for %q", test.text)
+ }
+ if n.Uint64 != test.uint64 {
+ t.Errorf("uint64 for %q should be %d Is %d", test.text, test.uint64, n.Uint64)
+ }
+ } else if n.IsUint {
+ t.Errorf("did not expect unsigned integer for %q", test.text)
+ }
+ if test.isFloat {
+ if !n.IsFloat {
+ t.Errorf("expected float for %q", test.text)
+ }
+ if n.Float64 != test.float64 {
+ t.Errorf("float64 for %q should be %g Is %g", test.text, test.float64, n.Float64)
+ }
+ } else if n.IsFloat {
+ t.Errorf("did not expect float for %q", test.text)
+ }
+ if test.isComplex {
+ if !n.IsComplex {
+ t.Errorf("expected complex for %q", test.text)
+ }
+ if n.Complex128 != test.complex128 {
+ t.Errorf("complex128 for %q should be %g Is %g", test.text, test.complex128, n.Complex128)
+ }
+ } else if n.IsComplex {
+ t.Errorf("did not expect complex for %q", test.text)
+ }
+ }
+}
+
+type parseTest struct {
+ name string
+ input string
+ ok bool
+ result string // what the user would see in an error message.
+}
+
+const (
+ noError = true
+ hasError = false
+)
+
+var parseTests = []parseTest{
+ {"empty", "", noError,
+ ``},
+ {"comment", "{{/*\n\n\n*/}}", noError,
+ ``},
+ {"spaces", " \t\n", noError,
+ `" \t\n"`},
+ {"text", "some text", noError,
+ `"some text"`},
+ {"emptyAction", "{{}}", hasError,
+ `{{}}`},
+ {"field", "{{.X}}", noError,
+ `{{.X}}`},
+ {"simple command", "{{printf}}", noError,
+ `{{printf}}`},
+ {"$ invocation", "{{$}}", noError,
+ "{{$}}"},
+ {"variable invocation", "{{with $x := 3}}{{$x 23}}{{end}}", noError,
+ "{{with $x := 3}}{{$x 23}}{{end}}"},
+ {"variable with fields", "{{$.I}}", noError,
+ "{{$.I}}"},
+ {"multi-word command", "{{printf `%d` 23}}", noError,
+ "{{printf `%d` 23}}"},
+ {"pipeline", "{{.X|.Y}}", noError,
+ `{{.X | .Y}}`},
+ {"pipeline with decl", "{{$x := .X|.Y}}", noError,
+ `{{$x := .X | .Y}}`},
+ {"nested pipeline", "{{.X (.Y .Z) (.A | .B .C) (.E)}}", noError,
+ `{{.X (.Y .Z) (.A | .B .C) (.E)}}`},
+ {"field applied to parentheses", "{{(.Y .Z).Field}}", noError,
+ `{{(.Y .Z).Field}}`},
+ {"simple if", "{{if .X}}hello{{end}}", noError,
+ `{{if .X}}"hello"{{end}}`},
+ {"if with else", "{{if .X}}true{{else}}false{{end}}", noError,
+ `{{if .X}}"true"{{else}}"false"{{end}}`},
+ {"if with else if", "{{if .X}}true{{else if .Y}}false{{end}}", noError,
+ `{{if .X}}"true"{{else}}{{if .Y}}"false"{{end}}{{end}}`},
+ {"if else chain", "+{{if .X}}X{{else if .Y}}Y{{else if .Z}}Z{{end}}+", noError,
+ `"+"{{if .X}}"X"{{else}}{{if .Y}}"Y"{{else}}{{if .Z}}"Z"{{end}}{{end}}{{end}}"+"`},
+ {"simple range", "{{range .X}}hello{{end}}", noError,
+ `{{range .X}}"hello"{{end}}`},
+ {"chained field range", "{{range .X.Y.Z}}hello{{end}}", noError,
+ `{{range .X.Y.Z}}"hello"{{end}}`},
+ {"nested range", "{{range .X}}hello{{range .Y}}goodbye{{end}}{{end}}", noError,
+ `{{range .X}}"hello"{{range .Y}}"goodbye"{{end}}{{end}}`},
+ {"range with else", "{{range .X}}true{{else}}false{{end}}", noError,
+ `{{range .X}}"true"{{else}}"false"{{end}}`},
+ {"range over pipeline", "{{range .X|.M}}true{{else}}false{{end}}", noError,
+ `{{range .X | .M}}"true"{{else}}"false"{{end}}`},
+ {"range []int", "{{range .SI}}{{.}}{{end}}", noError,
+ `{{range .SI}}{{.}}{{end}}`},
+ {"range 1 var", "{{range $x := .SI}}{{.}}{{end}}", noError,
+ `{{range $x := .SI}}{{.}}{{end}}`},
+ {"range 2 vars", "{{range $x, $y := .SI}}{{.}}{{end}}", noError,
+ `{{range $x, $y := .SI}}{{.}}{{end}}`},
+ {"constants", "{{range .SI 1 -3.2i true false 'a' nil}}{{end}}", noError,
+ `{{range .SI 1 -3.2i true false 'a' nil}}{{end}}`},
+ {"template", "{{template `x`}}", noError,
+ `{{template "x"}}`},
+ {"template with arg", "{{template `x` .Y}}", noError,
+ `{{template "x" .Y}}`},
+ {"with", "{{with .X}}hello{{end}}", noError,
+ `{{with .X}}"hello"{{end}}`},
+ {"with with else", "{{with .X}}hello{{else}}goodbye{{end}}", noError,
+ `{{with .X}}"hello"{{else}}"goodbye"{{end}}`},
+ // Trimming spaces.
+ {"trim left", "x \r\n\t{{- 3}}", noError, `"x"{{3}}`},
+ {"trim right", "{{3 -}}\n\n\ty", noError, `{{3}}"y"`},
+ {"trim left and right", "x \r\n\t{{- 3 -}}\n\n\ty", noError, `"x"{{3}}"y"`},
+ {"trim with extra spaces", "x\n{{- 3 -}}\ny", noError, `"x"{{3}}"y"`},
+ {"comment trim left", "x \r\n\t{{- /* hi */}}", noError, `"x"`},
+ {"comment trim right", "{{/* hi */ -}}\n\n\ty", noError, `"y"`},
+ {"comment trim left and right", "x \r\n\t{{- /* */ -}}\n\n\ty", noError, `"x""y"`},
+ {"block definition", `{{block "foo" .}}hello{{end}}`, noError,
+ `{{template "foo" .}}`},
+
+ {"newline in assignment", "{{ $x \n := \n 1 \n }}", noError, "{{$x := 1}}"},
+ {"newline in empty action", "{{\n}}", hasError, "{{\n}}"},
+ {"newline in pipeline", "{{\n\"x\"\n|\nprintf\n}}", noError, `{{"x" | printf}}`},
+ {"newline in comment", "{{/*\nhello\n*/}}", noError, ""},
+ {"newline in comment", "{{-\n/*\nhello\n*/\n-}}", noError, ""},
+
+ // Errors.
+ {"unclosed action", "hello{{range", hasError, ""},
+ {"unmatched end", "{{end}}", hasError, ""},
+ {"unmatched else", "{{else}}", hasError, ""},
+ {"unmatched else after if", "{{if .X}}hello{{end}}{{else}}", hasError, ""},
+ {"multiple else", "{{if .X}}1{{else}}2{{else}}3{{end}}", hasError, ""},
+ {"missing end", "hello{{range .x}}", hasError, ""},
+ {"missing end after else", "hello{{range .x}}{{else}}", hasError, ""},
+ {"undefined function", "hello{{undefined}}", hasError, ""},
+ {"undefined variable", "{{$x}}", hasError, ""},
+ {"variable undefined after end", "{{with $x := 4}}{{end}}{{$x}}", hasError, ""},
+ {"variable undefined in template", "{{template $v}}", hasError, ""},
+ {"declare with field", "{{with $x.Y := 4}}{{end}}", hasError, ""},
+ {"template with field ref", "{{template .X}}", hasError, ""},
+ {"template with var", "{{template $v}}", hasError, ""},
+ {"invalid punctuation", "{{printf 3, 4}}", hasError, ""},
+ {"multidecl outside range", "{{with $v, $u := 3}}{{end}}", hasError, ""},
+ {"too many decls in range", "{{range $u, $v, $w := 3}}{{end}}", hasError, ""},
+ {"dot applied to parentheses", "{{printf (printf .).}}", hasError, ""},
+ {"adjacent args", "{{printf 3`x`}}", hasError, ""},
+ {"adjacent args with .", "{{printf `x`.}}", hasError, ""},
+ {"extra end after if", "{{if .X}}a{{else if .Y}}b{{end}}{{end}}", hasError, ""},
+ // Other kinds of assignments and operators aren't available yet.
+ {"bug0a", "{{$x := 0}}{{$x}}", noError, "{{$x := 0}}{{$x}}"},
+ {"bug0b", "{{$x += 1}}{{$x}}", hasError, ""},
+ {"bug0c", "{{$x ! 2}}{{$x}}", hasError, ""},
+ {"bug0d", "{{$x % 3}}{{$x}}", hasError, ""},
+ // Check the parse fails for := rather than comma.
+ {"bug0e", "{{range $x := $y := 3}}{{end}}", hasError, ""},
+ // Another bug: variable read must ignore following punctuation.
+ {"bug1a", "{{$x:=.}}{{$x!2}}", hasError, ""}, // ! is just illegal here.
+ {"bug1b", "{{$x:=.}}{{$x+2}}", hasError, ""}, // $x+2 should not parse as ($x) (+2).
+ {"bug1c", "{{$x:=.}}{{$x +2}}", noError, "{{$x := .}}{{$x +2}}"}, // It's OK with a space.
+ // dot following a literal value
+ {"dot after integer", "{{1.E}}", hasError, ""},
+ {"dot after float", "{{0.1.E}}", hasError, ""},
+ {"dot after boolean", "{{true.E}}", hasError, ""},
+ {"dot after char", "{{'a'.any}}", hasError, ""},
+ {"dot after string", `{{"hello".guys}}`, hasError, ""},
+ {"dot after dot", "{{..E}}", hasError, ""},
+ {"dot after nil", "{{nil.E}}", hasError, ""},
+ // Wrong pipeline
+ {"wrong pipeline dot", "{{12|.}}", hasError, ""},
+ {"wrong pipeline number", "{{.|12|printf}}", hasError, ""},
+ {"wrong pipeline string", "{{.|printf|\"error\"}}", hasError, ""},
+ {"wrong pipeline char", "{{12|printf|'e'}}", hasError, ""},
+ {"wrong pipeline boolean", "{{.|true}}", hasError, ""},
+ {"wrong pipeline nil", "{{'c'|nil}}", hasError, ""},
+ {"empty pipeline", `{{printf "%d" ( ) }}`, hasError, ""},
+ // Missing pipeline in block
+ {"block definition", `{{block "foo"}}hello{{end}}`, hasError, ""},
+}
+
+var builtins = map[string]interface{}{
+ "printf": fmt.Sprintf,
+ "contains": strings.Contains,
+}
+
+func testParse(doCopy bool, t *testing.T) {
+ textFormat = "%q"
+ defer func() { textFormat = "%s" }()
+ for _, test := range parseTests {
+ tmpl, err := New(test.name).Parse(test.input, "", "", make(map[string]*Tree), builtins)
+ switch {
+ case err == nil && !test.ok:
+ t.Errorf("%q: expected error; got none", test.name)
+ continue
+ case err != nil && test.ok:
+ t.Errorf("%q: unexpected error: %v", test.name, err)
+ continue
+ case err != nil && !test.ok:
+ // expected error, got one
+ if *debug {
+ fmt.Printf("%s: %s\n\t%s\n", test.name, test.input, err)
+ }
+ continue
+ }
+ var result string
+ if doCopy {
+ result = tmpl.Root.Copy().String()
+ } else {
+ result = tmpl.Root.String()
+ }
+ if result != test.result {
+ t.Errorf("%s=(%q): got\n\t%v\nexpected\n\t%v", test.name, test.input, result, test.result)
+ }
+ }
+}
+
+func TestParse(t *testing.T) {
+ testParse(false, t)
+}
+
+// Same as TestParse, but we copy the node first.
+func TestParseCopy(t *testing.T) {
+ testParse(true, t)
+}
+
+func TestParseWithComments(t *testing.T) {
+ textFormat = "%q"
+ defer func() { textFormat = "%s" }()
+ tests := [...]parseTest{
+ {"comment", "{{/*\n\n\n*/}}", noError, "{{/*\n\n\n*/}}"},
+ {"comment trim left", "x \r\n\t{{- /* hi */}}", noError, `"x"{{/* hi */}}`},
+ {"comment trim right", "{{/* hi */ -}}\n\n\ty", noError, `{{/* hi */}}"y"`},
+ {"comment trim left and right", "x \r\n\t{{- /* */ -}}\n\n\ty", noError, `"x"{{/* */}}"y"`},
+ }
+ for _, test := range tests {
+ t.Run(test.name, func(t *testing.T) {
+ tr := New(test.name)
+ tr.Mode = ParseComments
+ tmpl, err := tr.Parse(test.input, "", "", make(map[string]*Tree))
+ if err != nil {
+ t.Errorf("%q: expected error; got none", test.name)
+ }
+ if result := tmpl.Root.String(); result != test.result {
+ t.Errorf("%s=(%q): got\n\t%v\nexpected\n\t%v", test.name, test.input, result, test.result)
+ }
+ })
+ }
+}
+
+type isEmptyTest struct {
+ name string
+ input string
+ empty bool
+}
+
+var isEmptyTests = []isEmptyTest{
+ {"empty", ``, true},
+ {"nonempty", `hello`, false},
+ {"spaces only", " \t\n \t\n", true},
+ {"comment only", "{{/* comment */}}", true},
+ {"definition", `{{define "x"}}something{{end}}`, true},
+ {"definitions and space", "{{define `x`}}something{{end}}\n\n{{define `y`}}something{{end}}\n\n", true},
+ {"definitions and text", "{{define `x`}}something{{end}}\nx\n{{define `y`}}something{{end}}\ny\n", false},
+ {"definition and action", "{{define `x`}}something{{end}}{{if 3}}foo{{end}}", false},
+}
+
+func TestIsEmpty(t *testing.T) {
+ if !IsEmptyTree(nil) {
+ t.Errorf("nil tree is not empty")
+ }
+ for _, test := range isEmptyTests {
+ tree, err := New("root").Parse(test.input, "", "", make(map[string]*Tree), nil)
+ if err != nil {
+ t.Errorf("%q: unexpected error: %v", test.name, err)
+ continue
+ }
+ if empty := IsEmptyTree(tree.Root); empty != test.empty {
+ t.Errorf("%q: expected %t got %t", test.name, test.empty, empty)
+ }
+ }
+}
+
+func TestErrorContextWithTreeCopy(t *testing.T) {
+ tree, err := New("root").Parse("{{if true}}{{end}}", "", "", make(map[string]*Tree), nil)
+ if err != nil {
+ t.Fatalf("unexpected tree parse failure: %v", err)
+ }
+ treeCopy := tree.Copy()
+ wantLocation, wantContext := tree.ErrorContext(tree.Root.Nodes[0])
+ gotLocation, gotContext := treeCopy.ErrorContext(treeCopy.Root.Nodes[0])
+ if wantLocation != gotLocation {
+ t.Errorf("wrong error location want %q got %q", wantLocation, gotLocation)
+ }
+ if wantContext != gotContext {
+ t.Errorf("wrong error location want %q got %q", wantContext, gotContext)
+ }
+}
+
+// All failures, and the result is a string that must appear in the error message.
+var errorTests = []parseTest{
+ // Check line numbers are accurate.
+ {"unclosed1",
+ "line1\n{{",
+ hasError, `unclosed1:2: unclosed action`},
+ {"unclosed2",
+ "line1\n{{define `x`}}line2\n{{",
+ hasError, `unclosed2:3: unclosed action`},
+ {"unclosed3",
+ "line1\n{{\"x\"\n\"y\"\n",
+ hasError, `unclosed3:4: unclosed action started at unclosed3:2`},
+ {"unclosed4",
+ "{{\n\n\n\n\n",
+ hasError, `unclosed4:6: unclosed action started at unclosed4:1`},
+ {"var1",
+ "line1\n{{\nx\n}}",
+ hasError, `var1:3: function "x" not defined`},
+ // Specific errors.
+ {"function",
+ "{{foo}}",
+ hasError, `function "foo" not defined`},
+ {"comment1",
+ "{{/*}}",
+ hasError, `comment1:1: unclosed comment`},
+ {"comment2",
+ "{{/*\nhello\n}}",
+ hasError, `comment2:1: unclosed comment`},
+ {"lparen",
+ "{{.X (1 2 3}}",
+ hasError, `unclosed left paren`},
+ {"rparen",
+ "{{.X 1 2 3 ) }}",
+ hasError, `unexpected ")" in command`},
+ {"rparen2",
+ "{{(.X 1 2 3",
+ hasError, `unclosed action`},
+ {"space",
+ "{{`x`3}}",
+ hasError, `in operand`},
+ {"idchar",
+ "{{a#}}",
+ hasError, `'#'`},
+ {"charconst",
+ "{{'a}}",
+ hasError, `unterminated character constant`},
+ {"stringconst",
+ `{{"a}}`,
+ hasError, `unterminated quoted string`},
+ {"rawstringconst",
+ "{{`a}}",
+ hasError, `unterminated raw quoted string`},
+ {"number",
+ "{{0xi}}",
+ hasError, `number syntax`},
+ {"multidefine",
+ "{{define `a`}}a{{end}}{{define `a`}}b{{end}}",
+ hasError, `multiple definition of template`},
+ {"eof",
+ "{{range .X}}",
+ hasError, `unexpected EOF`},
+ {"variable",
+ // Declare $x so it's defined, to avoid that error, and then check we don't parse a declaration.
+ "{{$x := 23}}{{with $x.y := 3}}{{$x 23}}{{end}}",
+ hasError, `unexpected ":="`},
+ {"multidecl",
+ "{{$a,$b,$c := 23}}",
+ hasError, `too many declarations`},
+ {"undefvar",
+ "{{$a}}",
+ hasError, `undefined variable`},
+ {"wrongdot",
+ "{{true.any}}",
+ hasError, `unexpected . after term`},
+ {"wrongpipeline",
+ "{{12|false}}",
+ hasError, `non executable command in pipeline`},
+ {"emptypipeline",
+ `{{ ( ) }}`,
+ hasError, `missing value for parenthesized pipeline`},
+ {"multilinerawstring",
+ "{{ $v := `\n` }} {{",
+ hasError, `multilinerawstring:2: unclosed action`},
+ {"rangeundefvar",
+ "{{range $k}}{{end}}",
+ hasError, `undefined variable`},
+ {"rangeundefvars",
+ "{{range $k, $v}}{{end}}",
+ hasError, `undefined variable`},
+ {"rangemissingvalue1",
+ "{{range $k,}}{{end}}",
+ hasError, `missing value for range`},
+ {"rangemissingvalue2",
+ "{{range $k, $v := }}{{end}}",
+ hasError, `missing value for range`},
+ {"rangenotvariable1",
+ "{{range $k, .}}{{end}}",
+ hasError, `range can only initialize variables`},
+ {"rangenotvariable2",
+ "{{range $k, 123 := .}}{{end}}",
+ hasError, `range can only initialize variables`},
+}
+
+func TestErrors(t *testing.T) {
+ for _, test := range errorTests {
+ t.Run(test.name, func(t *testing.T) {
+ _, err := New(test.name).Parse(test.input, "", "", make(map[string]*Tree))
+ if err == nil {
+ t.Fatalf("expected error %q, got nil", test.result)
+ }
+ if !strings.Contains(err.Error(), test.result) {
+ t.Fatalf("error %q does not contain %q", err, test.result)
+ }
+ })
+ }
+}
+
+func TestBlock(t *testing.T) {
+ const (
+ input = `a{{block "inner" .}}bar{{.}}baz{{end}}b`
+ outer = `a{{template "inner" .}}b`
+ inner = `bar{{.}}baz`
+ )
+ treeSet := make(map[string]*Tree)
+ tmpl, err := New("outer").Parse(input, "", "", treeSet, nil)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if g, w := tmpl.Root.String(), outer; g != w {
+ t.Errorf("outer template = %q, want %q", g, w)
+ }
+ inTmpl := treeSet["inner"]
+ if inTmpl == nil {
+ t.Fatal("block did not define template")
+ }
+ if g, w := inTmpl.Root.String(), inner; g != w {
+ t.Errorf("inner template = %q, want %q", g, w)
+ }
+}
+
+func TestLineNum(t *testing.T) {
+ const count = 100
+ text := strings.Repeat("{{printf 1234}}\n", count)
+ tree, err := New("bench").Parse(text, "", "", make(map[string]*Tree), builtins)
+ if err != nil {
+ t.Fatal(err)
+ }
+ // Check the line numbers. Each line is an action containing a pipeline, followed by text (the newline).
+ // That's two nodes per line.
+ nodes := tree.Root.Nodes
+ for i := 0; i < len(nodes); i += 2 {
+ line := 1 + i/2
+ // Action first.
+ action := nodes[i].(*ActionNode)
+ if action.Line != line {
+ t.Fatalf("line %d: action is line %d", line, action.Line)
+ }
+ pipe := action.Pipe
+ if pipe.Line != line {
+ t.Fatalf("line %d: pipe is line %d", line, pipe.Line)
+ }
+ }
+}
+
+func BenchmarkParseLarge(b *testing.B) {
+ text := strings.Repeat("{{1234}}\n", 10000)
+ for i := 0; i < b.N; i++ {
+ _, err := New("bench").Parse(text, "", "", make(map[string]*Tree), builtins)
+ if err != nil {
+ b.Fatal(err)
+ }
+ }
+}
+
+var sinkv, sinkl string
+
+func BenchmarkVariableString(b *testing.B) {
+ v := &VariableNode{
+ Ident: []string{"$", "A", "BB", "CCC", "THIS_IS_THE_VARIABLE_BEING_PROCESSED"},
+ }
+ b.ResetTimer()
+ b.ReportAllocs()
+ for i := 0; i < b.N; i++ {
+ sinkv = v.String()
+ }
+ if sinkv == "" {
+ b.Fatal("Benchmark was not run")
+ }
+}
+
+func BenchmarkListString(b *testing.B) {
+ text := `
+{{(printf .Field1.Field2.Field3).Value}}
+{{$x := (printf .Field1.Field2.Field3).Value}}
+{{$y := (printf $x.Field1.Field2.Field3).Value}}
+{{$z := $y.Field1.Field2.Field3}}
+{{if contains $y $z}}
+ {{printf "%q" $y}}
+{{else}}
+ {{printf "%q" $x}}
+{{end}}
+{{with $z.Field1 | contains "boring"}}
+ {{printf "%q" . | printf "%s"}}
+{{else}}
+ {{printf "%d %d %d" 11 11 11}}
+ {{printf "%d %d %s" 22 22 $x.Field1.Field2.Field3 | printf "%s"}}
+ {{printf "%v" (contains $z.Field1.Field2 $y)}}
+{{end}}
+`
+ tree, err := New("bench").Parse(text, "", "", make(map[string]*Tree), builtins)
+ if err != nil {
+ b.Fatal(err)
+ }
+ b.ResetTimer()
+ b.ReportAllocs()
+ for i := 0; i < b.N; i++ {
+ sinkl = tree.Root.String()
+ }
+ if sinkl == "" {
+ b.Fatal("Benchmark was not run")
+ }
+}