summaryrefslogtreecommitdiffstats
path: root/src/cmd/doc
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-28 13:14:23 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-28 13:14:23 +0000
commit73df946d56c74384511a194dd01dbe099584fd1a (patch)
treefd0bcea490dd81327ddfbb31e215439672c9a068 /src/cmd/doc
parentInitial commit. (diff)
downloadgolang-1.16-73df946d56c74384511a194dd01dbe099584fd1a.tar.xz
golang-1.16-73df946d56c74384511a194dd01dbe099584fd1a.zip
Adding upstream version 1.16.10.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to '')
-rw-r--r--src/cmd/doc/dirs.go303
-rw-r--r--src/cmd/doc/doc_test.go1049
-rw-r--r--src/cmd/doc/main.go408
-rw-r--r--src/cmd/doc/pkg.go1085
-rw-r--r--src/cmd/doc/testdata/merge/aa.go7
-rw-r--r--src/cmd/doc/testdata/merge/bb.go7
-rw-r--r--src/cmd/doc/testdata/nested/empty/empty.go1
-rw-r--r--src/cmd/doc/testdata/nested/ignore.go4
-rw-r--r--src/cmd/doc/testdata/nested/nested/real.go4
-rw-r--r--src/cmd/doc/testdata/pkg.go233
10 files changed, 3101 insertions, 0 deletions
diff --git a/src/cmd/doc/dirs.go b/src/cmd/doc/dirs.go
new file mode 100644
index 0000000..661624c
--- /dev/null
+++ b/src/cmd/doc/dirs.go
@@ -0,0 +1,303 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package main
+
+import (
+ "bytes"
+ "fmt"
+ exec "internal/execabs"
+ "log"
+ "os"
+ "path/filepath"
+ "regexp"
+ "strings"
+ "sync"
+
+ "golang.org/x/mod/semver"
+)
+
// A Dir describes a directory holding code by specifying
// the expected import path and the file system directory.
type Dir struct {
	importPath string // import path for that dir
	dir        string // file system directory
	inModule   bool   // dir lies inside a module: bfsWalkRoot then prunes vendor dirs and nested go.mod boundaries
}
+
// Dirs is a structure for scanning the directory tree.
// Its Next method returns the next Go source directory it finds.
// Although it can be used to scan the tree multiple times, it
// only walks the tree once, caching the data it finds.
type Dirs struct {
	scan   chan Dir // Directories generated by walk.
	hist   []Dir    // History of reported Dirs.
	offset int      // Counter for Next.
}

// dirs is the single package-level scanner, set up by dirsInit.
var dirs Dirs
+
+// dirsInit starts the scanning of package directories in GOROOT and GOPATH. Any
+// extra paths passed to it are included in the channel.
+func dirsInit(extra ...Dir) {
+ dirs.hist = make([]Dir, 0, 1000)
+ dirs.hist = append(dirs.hist, extra...)
+ dirs.scan = make(chan Dir)
+ go dirs.walk(codeRoots())
+}
+
// Reset puts the scan back at the beginning.
// Directories already cached in hist are replayed by Next;
// the underlying walk goroutine is never restarted.
func (d *Dirs) Reset() {
	d.offset = 0
}
+
+// Next returns the next directory in the scan. The boolean
+// is false when the scan is done.
+func (d *Dirs) Next() (Dir, bool) {
+ if d.offset < len(d.hist) {
+ dir := d.hist[d.offset]
+ d.offset++
+ return dir, true
+ }
+ dir, ok := <-d.scan
+ if !ok {
+ return Dir{}, false
+ }
+ d.hist = append(d.hist, dir)
+ d.offset++
+ return dir, ok
+}
+
+// walk walks the trees in GOROOT and GOPATH.
+func (d *Dirs) walk(roots []Dir) {
+ for _, root := range roots {
+ d.bfsWalkRoot(root)
+ }
+ close(d.scan)
+}
+
+// bfsWalkRoot walks a single directory hierarchy in breadth-first lexical order.
+// Each Go source directory it finds is delivered on d.scan.
+func (d *Dirs) bfsWalkRoot(root Dir) {
+ root.dir = filepath.Clean(root.dir) // because filepath.Join will do it anyway
+
+ // this is the queue of directories to examine in this pass.
+ this := []string{}
+ // next is the queue of directories to examine in the next pass.
+ next := []string{root.dir}
+
+ for len(next) > 0 {
+ this, next = next, this[0:0]
+ for _, dir := range this {
+ fd, err := os.Open(dir)
+ if err != nil {
+ log.Print(err)
+ continue
+ }
+ entries, err := fd.Readdir(0)
+ fd.Close()
+ if err != nil {
+ log.Print(err)
+ continue
+ }
+ hasGoFiles := false
+ for _, entry := range entries {
+ name := entry.Name()
+ // For plain files, remember if this directory contains any .go
+ // source files, but ignore them otherwise.
+ if !entry.IsDir() {
+ if !hasGoFiles && strings.HasSuffix(name, ".go") {
+ hasGoFiles = true
+ }
+ continue
+ }
+ // Entry is a directory.
+
+ // The go tool ignores directories starting with ., _, or named "testdata".
+ if name[0] == '.' || name[0] == '_' || name == "testdata" {
+ continue
+ }
+ // When in a module, ignore vendor directories and stop at module boundaries.
+ if root.inModule {
+ if name == "vendor" {
+ continue
+ }
+ if fi, err := os.Stat(filepath.Join(dir, name, "go.mod")); err == nil && !fi.IsDir() {
+ continue
+ }
+ }
+ // Remember this (fully qualified) directory for the next pass.
+ next = append(next, filepath.Join(dir, name))
+ }
+ if hasGoFiles {
+ // It's a candidate.
+ importPath := root.importPath
+ if len(dir) > len(root.dir) {
+ if importPath != "" {
+ importPath += "/"
+ }
+ importPath += filepath.ToSlash(dir[len(root.dir)+1:])
+ }
+ d.scan <- Dir{importPath, dir, root.inModule}
+ }
+ }
+
+ }
+}
+
+var testGOPATH = false // force GOPATH use for testing
+
+// codeRoots returns the code roots to search for packages.
+// In GOPATH mode this is GOROOT/src and GOPATH/src, with empty import paths.
+// In module mode, this is each module root, with an import path set to its module path.
+func codeRoots() []Dir {
+ codeRootsCache.once.Do(func() {
+ codeRootsCache.roots = findCodeRoots()
+ })
+ return codeRootsCache.roots
+}
+
// codeRootsCache memoizes the result of findCodeRoots on behalf of codeRoots.
var codeRootsCache struct {
	once  sync.Once
	roots []Dir
}

// usingModules records whether 'go env GOMOD' reported an enabled module;
// it is set as a side effect of findCodeRoots.
var usingModules bool
+
+func findCodeRoots() []Dir {
+ var list []Dir
+ if !testGOPATH {
+ // Check for use of modules by 'go env GOMOD',
+ // which reports a go.mod file path if modules are enabled.
+ stdout, _ := exec.Command("go", "env", "GOMOD").Output()
+ gomod := string(bytes.TrimSpace(stdout))
+
+ usingModules = len(gomod) > 0
+ if usingModules {
+ list = append(list,
+ Dir{dir: filepath.Join(buildCtx.GOROOT, "src"), inModule: true},
+ Dir{importPath: "cmd", dir: filepath.Join(buildCtx.GOROOT, "src", "cmd"), inModule: true})
+ }
+
+ if gomod == os.DevNull {
+ // Modules are enabled, but the working directory is outside any module.
+ // We can still access std, cmd, and packages specified as source files
+ // on the command line, but there are no module roots.
+ // Avoid 'go list -m all' below, since it will not work.
+ return list
+ }
+ }
+
+ if !usingModules {
+ list = append(list, Dir{dir: filepath.Join(buildCtx.GOROOT, "src")})
+ for _, root := range splitGopath() {
+ list = append(list, Dir{dir: filepath.Join(root, "src")})
+ }
+ return list
+ }
+
+ // Find module root directories from go list.
+ // Eventually we want golang.org/x/tools/go/packages
+ // to handle the entire file system search and become go/packages,
+ // but for now enumerating the module roots lets us fit modules
+ // into the current code with as few changes as possible.
+ mainMod, vendorEnabled, err := vendorEnabled()
+ if err != nil {
+ return list
+ }
+ if vendorEnabled {
+ // Add the vendor directory to the search path ahead of "std".
+ // That way, if the main module *is* "std", we will identify the path
+ // without the "vendor/" prefix before the one with that prefix.
+ list = append([]Dir{{dir: filepath.Join(mainMod.Dir, "vendor"), inModule: false}}, list...)
+ if mainMod.Path != "std" {
+ list = append(list, Dir{importPath: mainMod.Path, dir: mainMod.Dir, inModule: true})
+ }
+ return list
+ }
+
+ cmd := exec.Command("go", "list", "-m", "-f={{.Path}}\t{{.Dir}}", "all")
+ cmd.Stderr = os.Stderr
+ out, _ := cmd.Output()
+ for _, line := range strings.Split(string(out), "\n") {
+ i := strings.Index(line, "\t")
+ if i < 0 {
+ continue
+ }
+ path, dir := line[:i], line[i+1:]
+ if dir != "" {
+ list = append(list, Dir{importPath: path, dir: dir, inModule: true})
+ }
+ }
+
+ return list
+}
+
// The functions below are derived from x/tools/internal/imports at CL 203017.

// moduleJSON holds the fields of a 'go list -m' record used here.
type moduleJSON struct {
	Path, Dir, GoVersion string
}

// modFlagRegexp extracts the argument of a -mod flag (e.g. "-mod=vendor",
// "-mod vendor") from a GOFLAGS string.
var modFlagRegexp = regexp.MustCompile(`-mod[ =](\w+)`)
+
+// vendorEnabled indicates if vendoring is enabled.
+// Inspired by setDefaultBuildMod in modload/init.go
+func vendorEnabled() (*moduleJSON, bool, error) {
+ mainMod, go114, err := getMainModuleAnd114()
+ if err != nil {
+ return nil, false, err
+ }
+
+ stdout, _ := exec.Command("go", "env", "GOFLAGS").Output()
+ goflags := string(bytes.TrimSpace(stdout))
+ matches := modFlagRegexp.FindStringSubmatch(goflags)
+ var modFlag string
+ if len(matches) != 0 {
+ modFlag = matches[1]
+ }
+ if modFlag != "" {
+ // Don't override an explicit '-mod=' argument.
+ return mainMod, modFlag == "vendor", nil
+ }
+ if mainMod == nil || !go114 {
+ return mainMod, false, nil
+ }
+ // Check 1.14's automatic vendor mode.
+ if fi, err := os.Stat(filepath.Join(mainMod.Dir, "vendor")); err == nil && fi.IsDir() {
+ if mainMod.GoVersion != "" && semver.Compare("v"+mainMod.GoVersion, "v1.14") >= 0 {
+ // The Go version is at least 1.14, and a vendor directory exists.
+ // Set -mod=vendor by default.
+ return mainMod, true, nil
+ }
+ }
+ return mainMod, false, nil
+}
+
+// getMainModuleAnd114 gets the main module's information and whether the
+// go command in use is 1.14+. This is the information needed to figure out
+// if vendoring should be enabled.
+func getMainModuleAnd114() (*moduleJSON, bool, error) {
+ const format = `{{.Path}}
+{{.Dir}}
+{{.GoVersion}}
+{{range context.ReleaseTags}}{{if eq . "go1.14"}}{{.}}{{end}}{{end}}
+`
+ cmd := exec.Command("go", "list", "-m", "-f", format)
+ cmd.Stderr = os.Stderr
+ stdout, err := cmd.Output()
+ if err != nil {
+ return nil, false, nil
+ }
+ lines := strings.Split(string(stdout), "\n")
+ if len(lines) < 5 {
+ return nil, false, fmt.Errorf("unexpected stdout: %q", stdout)
+ }
+ mod := &moduleJSON{
+ Path: lines[0],
+ Dir: lines[1],
+ GoVersion: lines[2],
+ }
+ return mod, lines[3] == "go1.14", nil
+}
diff --git a/src/cmd/doc/doc_test.go b/src/cmd/doc/doc_test.go
new file mode 100644
index 0000000..39530e3
--- /dev/null
+++ b/src/cmd/doc/doc_test.go
@@ -0,0 +1,1049 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package main
+
+import (
+ "bytes"
+ "flag"
+ "os"
+ "path/filepath"
+ "regexp"
+ "runtime"
+ "strings"
+ "testing"
+)
+
+func TestMain(m *testing.M) {
+ // Clear GOPATH so we don't access the user's own packages in the test.
+ buildCtx.GOPATH = ""
+ testGOPATH = true // force GOPATH mode; module test is in cmd/go/testdata/script/mod_doc.txt
+
+ // Add $GOROOT/src/cmd/doc/testdata explicitly so we can access its contents in the test.
+ // Normally testdata directories are ignored, but sending it to dirs.scan directly is
+ // a hack that works around the check.
+ testdataDir, err := filepath.Abs("testdata")
+ if err != nil {
+ panic(err)
+ }
+ dirsInit(
+ Dir{importPath: "testdata", dir: testdataDir},
+ Dir{importPath: "testdata/nested", dir: filepath.Join(testdataDir, "nested")},
+ Dir{importPath: "testdata/nested/nested", dir: filepath.Join(testdataDir, "nested", "nested")})
+
+ os.Exit(m.Run())
+}
+
// maybeSkip skips the calling test on platforms that lack the full
// GOROOT source tree the doc tests scan.
func maybeSkip(t *testing.T) {
	switch runtime.GOOS {
	case "ios":
		t.Skip("iOS does not have a full file tree")
	}
}
+
// isDotSlashTest pairs an input path with the expected isDotSlash result.
type isDotSlashTest struct {
	str    string
	result bool
}

// isDotSlashTests covers the empty string and "..." wildcard forms (not
// dot-slash) and the "." / ".." forms with both slash flavors (dot-slash).
var isDotSlashTests = []isDotSlashTest{
	{``, false},
	{`x`, false},
	{`...`, false},
	{`.../`, false},
	{`...\`, false},

	{`.`, true},
	{`./`, true},
	{`.\`, true},
	{`./x`, true},
	{`.\x`, true},

	{`..`, true},
	{`../`, true},
	{`..\`, true},
	{`../x`, true},
	{`..\x`, true},
}
+
+func TestIsDotSlashPath(t *testing.T) {
+ for _, test := range isDotSlashTests {
+ if result := isDotSlash(test.str); result != test.result {
+ t.Errorf("isDotSlash(%q) = %t; expected %t", test.str, result, test.result)
+ }
+ }
+}
+
// test describes one "go doc" invocation and the patterns that its
// output must (yes) and must not (no) match.
type test struct {
	name string
	args []string // Arguments to "[go] doc".
	yes  []string // Regular expressions that should match.
	no   []string // Regular expressions that should not match.
}

// p is the import path of the test package exercised by most cases below.
const p = "cmd/doc/testdata"
+
// tests drives TestDoc. Note: patterns such as `ConstOne += 1` are regular
// expressions, where ` +` matches the run of spaces go doc uses for alignment.
var tests = []test{
	// Sanity check.
	{
		"sanity check",
		[]string{p},
		[]string{`type ExportedType struct`},
		nil,
	},

	// Package dump includes import, package statement.
	{
		"package clause",
		[]string{p},
		[]string{`package pkg.*cmd/doc/testdata`},
		nil,
	},

	// Constants.
	// Package dump
	{
		"full package",
		[]string{p},
		[]string{
			`Package comment`,
			`const ExportedConstant = 1`, // Simple constant.
			`const ConstOne = 1`, // First entry in constant block.
			`const ConstFive ...`, // From block starting with unexported constant.
			`var ExportedVariable = 1`, // Simple variable.
			`var VarOne = 1`, // First entry in variable block.
			`func ExportedFunc\(a int\) bool`, // Function.
			`func ReturnUnexported\(\) unexportedType`, // Function with unexported return type.
			`type ExportedType struct{ ... }`, // Exported type.
			`const ExportedTypedConstant ExportedType = iota`, // Typed constant.
			`const ExportedTypedConstant_unexported unexportedType`, // Typed constant, exported for unexported type.
			`const ConstLeft2 uint64 ...`, // Typed constant using unexported iota.
			`const ConstGroup1 unexportedType = iota ...`, // Typed constant using unexported type.
			`const ConstGroup4 ExportedType = ExportedType{}`, // Typed constant using exported type.
			`const MultiLineConst = ...`, // Multi line constant.
			`var MultiLineVar = map\[struct{ ... }\]struct{ ... }{ ... }`, // Multi line variable.
			`func MultiLineFunc\(x interface{ ... }\) \(r struct{ ... }\)`, // Multi line function.
			`var LongLine = newLongLine\(("someArgument[1-4]", ){4}...\)`, // Long list of arguments.
			`type T1 = T2`, // Type alias
		},
		[]string{
			`const internalConstant = 2`, // No internal constants.
			`var internalVariable = 2`, // No internal variables.
			`func internalFunc(a int) bool`, // No internal functions.
			`Comment about exported constant`, // No comment for single constant.
			`Comment about exported variable`, // No comment for single variable.
			`Comment about block of constants`, // No comment for constant block.
			`Comment about block of variables`, // No comment for variable block.
			`Comment before ConstOne`, // No comment for first entry in constant block.
			`Comment before VarOne`, // No comment for first entry in variable block.
			`ConstTwo = 2`, // No second entry in constant block.
			`VarTwo = 2`, // No second entry in variable block.
			`VarFive = 5`, // From block starting with unexported variable.
			`type unexportedType`, // No unexported type.
			`unexportedTypedConstant`, // No unexported typed constant.
			`\bField`, // No fields.
			`Method`, // No methods.
			`someArgument[5-8]`, // No truncated arguments.
			`type T1 T2`, // Type alias does not display as type declaration.
		},
	},
	// Package dump -all
	{
		"full package",
		[]string{"-all", p},
		[]string{
			`package pkg .*import`,
			`Package comment`,
			`CONSTANTS`,
			`Comment before ConstOne`,
			`ConstOne = 1`,
			`ConstTwo = 2 // Comment on line with ConstTwo`,
			`ConstFive`,
			`ConstSix`,
			`Const block where first entry is unexported`,
			`ConstLeft2, constRight2 uint64`,
			`constLeft3, ConstRight3`,
			`ConstLeft4, ConstRight4`,
			`Duplicate = iota`,
			`const CaseMatch = 1`,
			`const Casematch = 2`,
			`const ExportedConstant = 1`,
			`const MultiLineConst = `,
			`MultiLineString1`,
			`VARIABLES`,
			`Comment before VarOne`,
			`VarOne = 1`,
			`Comment about block of variables`,
			`VarFive = 5`,
			`var ExportedVariable = 1`,
			`var ExportedVarOfUnExported unexportedType`,
			`var LongLine = newLongLine\(`,
			`var MultiLineVar = map\[struct {`,
			`FUNCTIONS`,
			`func ExportedFunc\(a int\) bool`,
			`Comment about exported function`,
			`func MultiLineFunc\(x interface`,
			`func ReturnUnexported\(\) unexportedType`,
			`TYPES`,
			`type ExportedInterface interface`,
			`type ExportedStructOneField struct`,
			`type ExportedType struct`,
			`Comment about exported type`,
			`const ConstGroup4 ExportedType = ExportedType`,
			`ExportedTypedConstant ExportedType = iota`,
			`Constants tied to ExportedType`,
			`func ExportedTypeConstructor\(\) \*ExportedType`,
			`Comment about constructor for exported type`,
			`func ReturnExported\(\) ExportedType`,
			`func \(ExportedType\) ExportedMethod\(a int\) bool`,
			`Comment about exported method`,
			`type T1 = T2`,
			`type T2 int`,
		},
		[]string{
			`constThree`,
			`_, _ uint64 = 2 \* iota, 1 << iota`,
			`constLeft1, constRight1`,
			`duplicate`,
			`varFour`,
			`func internalFunc`,
			`unexportedField`,
			`func \(unexportedType\)`,
		},
	},
	// Package with just the package declaration. Issue 31457.
	{
		"only package declaration",
		[]string{"-all", p + "/nested/empty"},
		[]string{`package empty .*import`},
		nil,
	},
	// Package dump -short
	{
		"full package with -short",
		[]string{`-short`, p},
		[]string{
			`const ExportedConstant = 1`, // Simple constant.
			`func ReturnUnexported\(\) unexportedType`, // Function with unexported return type.
		},
		[]string{
			`MultiLine(String|Method|Field)`, // No data from multi line portions.
		},
	},
	// Package dump -u
	{
		"full package with u",
		[]string{`-u`, p},
		[]string{
			`const ExportedConstant = 1`, // Simple constant.
			`const internalConstant = 2`, // Internal constants.
			`func internalFunc\(a int\) bool`, // Internal functions.
			`func ReturnUnexported\(\) unexportedType`, // Function with unexported return type.
		},
		[]string{
			`Comment about exported constant`, // No comment for simple constant.
			`Comment about block of constants`, // No comment for constant block.
			`Comment about internal function`, // No comment for internal function.
			`MultiLine(String|Method|Field)`, // No data from multi line portions.
		},
	},
	// Package dump -u -all
	{
		"full package",
		[]string{"-u", "-all", p},
		[]string{
			`package pkg .*import`,
			`Package comment`,
			`CONSTANTS`,
			`Comment before ConstOne`,
			`ConstOne += 1`,
			`ConstTwo += 2 // Comment on line with ConstTwo`,
			`constThree = 3 // Comment on line with constThree`,
			`ConstFive`,
			`const internalConstant += 2`,
			`Comment about internal constant`,
			`VARIABLES`,
			`Comment before VarOne`,
			`VarOne += 1`,
			`Comment about block of variables`,
			`varFour += 4`,
			`VarFive += 5`,
			`varSix += 6`,
			`var ExportedVariable = 1`,
			`var LongLine = newLongLine\(`,
			`var MultiLineVar = map\[struct {`,
			`var internalVariable = 2`,
			`Comment about internal variable`,
			`FUNCTIONS`,
			`func ExportedFunc\(a int\) bool`,
			`Comment about exported function`,
			`func MultiLineFunc\(x interface`,
			`func internalFunc\(a int\) bool`,
			`Comment about internal function`,
			`func newLongLine\(ss .*string\)`,
			`TYPES`,
			`type ExportedType struct`,
			`type T1 = T2`,
			`type T2 int`,
			`type unexportedType int`,
			`Comment about unexported type`,
			`ConstGroup1 unexportedType = iota`,
			`ConstGroup2`,
			`ConstGroup3`,
			`ExportedTypedConstant_unexported unexportedType = iota`,
			`Constants tied to unexportedType`,
			`const unexportedTypedConstant unexportedType = 1`,
			`func ReturnUnexported\(\) unexportedType`,
			`func \(unexportedType\) ExportedMethod\(\) bool`,
			`func \(unexportedType\) unexportedMethod\(\) bool`,
		},
		nil,
	},

	// Single constant.
	{
		"single constant",
		[]string{p, `ExportedConstant`},
		[]string{
			`Comment about exported constant`, // Include comment.
			`const ExportedConstant = 1`,
		},
		nil,
	},
	// Single constant -u.
	{
		"single constant with -u",
		[]string{`-u`, p, `internalConstant`},
		[]string{
			`Comment about internal constant`, // Include comment.
			`const internalConstant = 2`,
		},
		nil,
	},
	// Block of constants.
	{
		"block of constants",
		[]string{p, `ConstTwo`},
		[]string{
			`Comment before ConstOne.\n.*ConstOne = 1`, // First...
			`ConstTwo = 2.*Comment on line with ConstTwo`, // And second show up.
			`Comment about block of constants`, // Comment does too.
		},
		[]string{
			`constThree`, // No unexported constant.
		},
	},
	// Block of constants -u.
	{
		"block of constants with -u",
		[]string{"-u", p, `constThree`},
		[]string{
			`constThree = 3.*Comment on line with constThree`,
		},
		nil,
	},
	// Block of constants -src.
	{
		"block of constants with -src",
		[]string{"-src", p, `ConstTwo`},
		[]string{
			`Comment about block of constants`, // Top comment.
			`ConstOne.*=.*1`, // Each constant seen.
			`ConstTwo.*=.*2.*Comment on line with ConstTwo`,
			`constThree`, // Even unexported constants.
		},
		nil,
	},
	// Block of constants with carryover type from unexported field.
	{
		"block of constants with carryover type",
		[]string{p, `ConstLeft2`},
		[]string{
			`ConstLeft2, constRight2 uint64`,
			`constLeft3, ConstRight3`,
			`ConstLeft4, ConstRight4`,
		},
		nil,
	},
	// Block of constants -u with carryover type from unexported field.
	{
		"block of constants with carryover type",
		[]string{"-u", p, `ConstLeft2`},
		[]string{
			`_, _ uint64 = 2 \* iota, 1 << iota`,
			`constLeft1, constRight1`,
			`ConstLeft2, constRight2`,
			`constLeft3, ConstRight3`,
			`ConstLeft4, ConstRight4`,
		},
		nil,
	},

	// Single variable.
	{
		"single variable",
		[]string{p, `ExportedVariable`},
		[]string{
			`ExportedVariable`, // Include comment.
			`var ExportedVariable = 1`,
		},
		nil,
	},
	// Single variable -u.
	{
		"single variable with -u",
		[]string{`-u`, p, `internalVariable`},
		[]string{
			`Comment about internal variable`, // Include comment.
			`var internalVariable = 2`,
		},
		nil,
	},
	// Block of variables.
	{
		"block of variables",
		[]string{p, `VarTwo`},
		[]string{
			`Comment before VarOne.\n.*VarOne = 1`, // First...
			`VarTwo = 2.*Comment on line with VarTwo`, // And second show up.
			`Comment about block of variables`, // Comment does too.
		},
		[]string{
			`varThree= 3`, // No unexported variable.
		},
	},
	// Block of variables -u.
	{
		"block of variables with -u",
		[]string{"-u", p, `varThree`},
		[]string{
			`varThree = 3.*Comment on line with varThree`,
		},
		nil,
	},

	// Function.
	{
		"function",
		[]string{p, `ExportedFunc`},
		[]string{
			`Comment about exported function`, // Include comment.
			`func ExportedFunc\(a int\) bool`,
		},
		nil,
	},
	// Function -u.
	{
		"function with -u",
		[]string{"-u", p, `internalFunc`},
		[]string{
			`Comment about internal function`, // Include comment.
			`func internalFunc\(a int\) bool`,
		},
		nil,
	},
	// Function with -src.
	{
		"function with -src",
		[]string{"-src", p, `ExportedFunc`},
		[]string{
			`Comment about exported function`, // Include comment.
			`func ExportedFunc\(a int\) bool`,
			`return true != false`, // Include body.
		},
		nil,
	},

	// Type.
	{
		"type",
		[]string{p, `ExportedType`},
		[]string{
			`Comment about exported type`, // Include comment.
			`type ExportedType struct`, // Type definition.
			`Comment before exported field.*\n.*ExportedField +int` +
				`.*Comment on line with exported field`,
			`ExportedEmbeddedType.*Comment on line with exported embedded field`,
			`Has unexported fields`,
			`func \(ExportedType\) ExportedMethod\(a int\) bool`,
			`const ExportedTypedConstant ExportedType = iota`, // Must include associated constant.
			`func ExportedTypeConstructor\(\) \*ExportedType`, // Must include constructor.
			`io.Reader.*Comment on line with embedded Reader`,
		},
		[]string{
			`unexportedField`, // No unexported field.
			`int.*embedded`, // No unexported embedded field.
			`Comment about exported method`, // No comment about exported method.
			`unexportedMethod`, // No unexported method.
			`unexportedTypedConstant`, // No unexported constant.
			`error`, // No embedded error.
		},
	},
	// Type with -src. Will see unexported fields.
	{
		"type",
		[]string{"-src", p, `ExportedType`},
		[]string{
			`Comment about exported type`, // Include comment.
			`type ExportedType struct`, // Type definition.
			`Comment before exported field`,
			`ExportedField.*Comment on line with exported field`,
			`ExportedEmbeddedType.*Comment on line with exported embedded field`,
			`unexportedType.*Comment on line with unexported embedded field`,
			`func \(ExportedType\) ExportedMethod\(a int\) bool`,
			`const ExportedTypedConstant ExportedType = iota`, // Must include associated constant.
			`func ExportedTypeConstructor\(\) \*ExportedType`, // Must include constructor.
			`io.Reader.*Comment on line with embedded Reader`,
		},
		[]string{
			`Comment about exported method`, // No comment about exported method.
			`unexportedMethod`, // No unexported method.
			`unexportedTypedConstant`, // No unexported constant.
		},
	},
	// Type -all.
	{
		"type",
		[]string{"-all", p, `ExportedType`},
		[]string{
			`type ExportedType struct {`, // Type definition as source.
			`Comment about exported type`, // Include comment afterwards.
			`const ConstGroup4 ExportedType = ExportedType\{\}`, // Related constants.
			`ExportedTypedConstant ExportedType = iota`,
			`Constants tied to ExportedType`,
			`func ExportedTypeConstructor\(\) \*ExportedType`,
			`Comment about constructor for exported type.`,
			`func ReturnExported\(\) ExportedType`,
			`func \(ExportedType\) ExportedMethod\(a int\) bool`,
			`Comment about exported method.`,
			`func \(ExportedType\) Uncommented\(a int\) bool\n\n`, // Ensure line gap after method with no comment
		},
		[]string{
			`unexportedType`,
		},
	},
	// Type T1 dump (alias).
	{
		"type T1",
		[]string{p + ".T1"},
		[]string{
			`type T1 = T2`,
		},
		[]string{
			`type T1 T2`,
			`type ExportedType`,
		},
	},
	// Type -u with unexported fields.
	{
		"type with unexported fields and -u",
		[]string{"-u", p, `ExportedType`},
		[]string{
			`Comment about exported type`, // Include comment.
			`type ExportedType struct`, // Type definition.
			`Comment before exported field.*\n.*ExportedField +int`,
			`unexportedField.*int.*Comment on line with unexported field`,
			`ExportedEmbeddedType.*Comment on line with exported embedded field`,
			`\*ExportedEmbeddedType.*Comment on line with exported embedded \*field`,
			`\*qualified.ExportedEmbeddedType.*Comment on line with exported embedded \*selector.field`,
			`unexportedType.*Comment on line with unexported embedded field`,
			`\*unexportedType.*Comment on line with unexported embedded \*field`,
			`io.Reader.*Comment on line with embedded Reader`,
			`error.*Comment on line with embedded error`,
			`func \(ExportedType\) unexportedMethod\(a int\) bool`,
			`unexportedTypedConstant`,
		},
		[]string{
			`Has unexported fields`,
		},
	},
	// Unexported type with -u.
	{
		"unexported type with -u",
		[]string{"-u", p, `unexportedType`},
		[]string{
			`Comment about unexported type`, // Include comment.
			`type unexportedType int`, // Type definition.
			`func \(unexportedType\) ExportedMethod\(\) bool`,
			`func \(unexportedType\) unexportedMethod\(\) bool`,
			`ExportedTypedConstant_unexported unexportedType = iota`,
			`const unexportedTypedConstant unexportedType = 1`,
		},
		nil,
	},

	// Interface.
	{
		"interface type",
		[]string{p, `ExportedInterface`},
		[]string{
			`Comment about exported interface`, // Include comment.
			`type ExportedInterface interface`, // Interface definition.
			`Comment before exported method.*\n.*ExportedMethod\(\)` +
				`.*Comment on line with exported method`,
			`io.Reader.*Comment on line with embedded Reader`,
			`error.*Comment on line with embedded error`,
			`Has unexported methods`,
		},
		[]string{
			`unexportedField`, // No unexported field.
			`Comment about exported method`, // No comment about exported method.
			`unexportedMethod`, // No unexported method.
			`unexportedTypedConstant`, // No unexported constant.
		},
	},
	// Interface -u with unexported methods.
	{
		"interface type with unexported methods and -u",
		[]string{"-u", p, `ExportedInterface`},
		[]string{
			`Comment about exported interface`, // Include comment.
			`type ExportedInterface interface`, // Interface definition.
			`Comment before exported method.*\n.*ExportedMethod\(\)` +
				`.*Comment on line with exported method`,
			`unexportedMethod\(\).*Comment on line with unexported method`,
			`io.Reader.*Comment on line with embedded Reader`,
			`error.*Comment on line with embedded error`,
		},
		[]string{
			`Has unexported methods`,
		},
	},

	// Interface method.
	{
		"interface method",
		[]string{p, `ExportedInterface.ExportedMethod`},
		[]string{
			`Comment before exported method.*\n.*ExportedMethod\(\)` +
				`.*Comment on line with exported method`,
		},
		[]string{
			`Comment about exported interface`,
		},
	},
	// Interface method at package level.
	{
		"interface method at package level",
		[]string{p, `ExportedMethod`},
		[]string{
			`func \(ExportedType\) ExportedMethod\(a int\) bool`,
			`Comment about exported method`,
		},
		[]string{
			`Comment before exported method.*\n.*ExportedMethod\(\)` +
				`.*Comment on line with exported method`,
		},
	},

	// Method.
	{
		"method",
		[]string{p, `ExportedType.ExportedMethod`},
		[]string{
			`func \(ExportedType\) ExportedMethod\(a int\) bool`,
			`Comment about exported method`,
		},
		nil,
	},
	// Method with -u.
	{
		"method with -u",
		[]string{"-u", p, `ExportedType.unexportedMethod`},
		[]string{
			`func \(ExportedType\) unexportedMethod\(a int\) bool`,
			`Comment about unexported method`,
		},
		nil,
	},
	// Method with -src.
	{
		"method with -src",
		[]string{"-src", p, `ExportedType.ExportedMethod`},
		[]string{
			`func \(ExportedType\) ExportedMethod\(a int\) bool`,
			`Comment about exported method`,
			`return true != true`,
		},
		nil,
	},

	// Field.
	{
		"field",
		[]string{p, `ExportedType.ExportedField`},
		[]string{
			`type ExportedType struct`,
			`ExportedField int`,
			`Comment before exported field`,
			`Comment on line with exported field`,
			`other fields elided`,
		},
		nil,
	},

	// Field with -u.
	{
		"method with -u",
		[]string{"-u", p, `ExportedType.unexportedField`},
		[]string{
			`unexportedField int`,
			`Comment on line with unexported field`,
		},
		nil,
	},

	// Field of struct with only one field.
	{
		"single-field struct",
		[]string{p, `ExportedStructOneField.OnlyField`},
		[]string{`the only field`},
		[]string{`other fields elided`},
	},

	// Case matching off.
	{
		"case matching off",
		[]string{p, `casematch`},
		[]string{
			`CaseMatch`,
			`Casematch`,
		},
		nil,
	},

	// Case matching on.
	{
		"case matching on",
		[]string{"-c", p, `Casematch`},
		[]string{
			`Casematch`,
		},
		[]string{
			`CaseMatch`,
		},
	},

	// Merging comments with -src.
	{
		"merge comments with -src A",
		[]string{"-src", p + "/merge", `A`},
		[]string{
			`A doc`,
			`func A`,
			`A comment`,
		},
		[]string{
			`Package A doc`,
			`Package B doc`,
			`B doc`,
			`B comment`,
			`B doc`,
		},
	},
	{
		"merge comments with -src B",
		[]string{"-src", p + "/merge", `B`},
		[]string{
			`B doc`,
			`func B`,
			`B comment`,
		},
		[]string{
			`Package A doc`,
			`Package B doc`,
			`A doc`,
			`A comment`,
			`A doc`,
		},
	},

	// No dups with -u. Issue 21797.
	{
		"case matching on, no dups",
		[]string{"-u", p, `duplicate`},
		[]string{
			`Duplicate`,
			`duplicate`,
		},
		[]string{
			"\\)\n+const", // This will appear if the const decl appears twice.
		},
	},
	{
		"non-imported: pkg.sym",
		[]string{"nested.Foo"},
		[]string{"Foo struct"},
		nil,
	},
	{
		"non-imported: pkg only",
		[]string{"nested"},
		[]string{"Foo struct"},
		nil,
	},
	{
		"non-imported: pkg sym",
		[]string{"nested", "Foo"},
		[]string{"Foo struct"},
		nil,
	},
	{
		"formatted doc on function",
		[]string{p, "ExportedFormattedDoc"},
		[]string{
			`func ExportedFormattedDoc\(a int\) bool`,
			// NOTE(review): interior indentation of this raw string was
			// collapsed in transit; reconstructed to go doc's 4/8-space
			// output convention — confirm against upstream doc_test.go.
			`    Comment about exported function with formatting\.

    Example

        fmt\.Println\(FormattedDoc\(\)\)

    Text after pre-formatted block\.`,
		},
		nil,
	},
	{
		"formatted doc on type field",
		[]string{p, "ExportedFormattedType.ExportedField"},
		[]string{
			`type ExportedFormattedType struct`,
			// NOTE(review): same reconstruction caveat as above; `[ ]`
			// makes the trailing space after // explicit in the regex.
			`    // Comment before exported field with formatting\.
    //[ ]
    // Example
    //[ ]
    //     a\.ExportedField = 123
    //[ ]
    // Text after pre-formatted block\.`,
			`ExportedField int`,
		},
		nil,
	},
}
+
+func TestDoc(t *testing.T) {
+ maybeSkip(t)
+ for _, test := range tests {
+ var b bytes.Buffer
+ var flagSet flag.FlagSet
+ err := do(&b, &flagSet, test.args)
+ if err != nil {
+ t.Fatalf("%s %v: %s\n", test.name, test.args, err)
+ }
+ output := b.Bytes()
+ failed := false
+ for j, yes := range test.yes {
+ re, err := regexp.Compile(yes)
+ if err != nil {
+ t.Fatalf("%s.%d: compiling %#q: %s", test.name, j, yes, err)
+ }
+ if !re.Match(output) {
+ t.Errorf("%s.%d: no match for %s %#q", test.name, j, test.args, yes)
+ failed = true
+ }
+ }
+ for j, no := range test.no {
+ re, err := regexp.Compile(no)
+ if err != nil {
+ t.Fatalf("%s.%d: compiling %#q: %s", test.name, j, no, err)
+ }
+ if re.Match(output) {
+ t.Errorf("%s.%d: incorrect match for %s %#q", test.name, j, test.args, no)
+ failed = true
+ }
+ }
+ if bytes.Count(output, []byte("TYPES\n")) > 1 {
+ t.Fatalf("%s: repeating headers", test.name)
+ }
+ if failed {
+ t.Logf("\n%s", output)
+ }
+ }
+}
+
+// Test the code to try multiple packages. Our test case is
+// go doc rand.Float64
+// This needs to find math/rand.Float64; however crypto/rand, which doesn't
+// have the symbol, usually appears first in the directory listing.
+func TestMultiplePackages(t *testing.T) {
+ if testing.Short() {
+ t.Skip("scanning file system takes too long")
+ }
+ maybeSkip(t)
+ var b bytes.Buffer // We don't care about the output.
+ // Make sure crypto/rand does not have the symbol.
+ {
+ var flagSet flag.FlagSet
+ err := do(&b, &flagSet, []string{"crypto/rand.float64"})
+ if err == nil {
+ t.Errorf("expected error from crypto/rand.float64")
+ } else if !strings.Contains(err.Error(), "no symbol float64") {
+ t.Errorf("unexpected error %q from crypto/rand.float64", err)
+ }
+ }
+ // Make sure math/rand does have the symbol.
+ {
+ var flagSet flag.FlagSet
+ err := do(&b, &flagSet, []string{"math/rand.float64"})
+ if err != nil {
+ t.Errorf("unexpected error %q from math/rand.float64", err)
+ }
+ }
+ // Try the shorthand.
+ {
+ var flagSet flag.FlagSet
+ err := do(&b, &flagSet, []string{"rand.float64"})
+ if err != nil {
+ t.Errorf("unexpected error %q from rand.float64", err)
+ }
+ }
+ // Now try a missing symbol. We should see both packages in the error.
+ {
+ var flagSet flag.FlagSet
+ err := do(&b, &flagSet, []string{"rand.doesnotexit"})
+ if err == nil {
+ t.Errorf("expected error from rand.doesnotexit")
+ } else {
+ errStr := err.Error()
+ if !strings.Contains(errStr, "no symbol") {
+ t.Errorf("error %q should contain 'no symbol", errStr)
+ }
+ if !strings.Contains(errStr, "crypto/rand") {
+ t.Errorf("error %q should contain crypto/rand", errStr)
+ }
+ if !strings.Contains(errStr, "math/rand") {
+ t.Errorf("error %q should contain math/rand", errStr)
+ }
+ }
+ }
+}
+
+// Test the code to look up packages when given two args. First test case is
+// go doc binary BigEndian
+// This needs to find encoding/binary.BigEndian, which means
+// finding the package encoding/binary given only "binary".
+// Second case is
+// go doc rand Float64
+// which again needs to find math/rand and not give up after crypto/rand,
+// which has no such function.
+func TestTwoArgLookup(t *testing.T) {
+ if testing.Short() {
+ t.Skip("scanning file system takes too long")
+ }
+ maybeSkip(t)
+ var b bytes.Buffer // We don't care about the output.
+ {
+ var flagSet flag.FlagSet
+ err := do(&b, &flagSet, []string{"binary", "BigEndian"})
+ if err != nil {
+ t.Errorf("unexpected error %q from binary BigEndian", err)
+ }
+ }
+ {
+ var flagSet flag.FlagSet
+ err := do(&b, &flagSet, []string{"rand", "Float64"})
+ if err != nil {
+ t.Errorf("unexpected error %q from rand Float64", err)
+ }
+ }
+ {
+ var flagSet flag.FlagSet
+ err := do(&b, &flagSet, []string{"bytes", "Foo"})
+ if err == nil {
+ t.Errorf("expected error from bytes Foo")
+ } else if !strings.Contains(err.Error(), "no symbol Foo") {
+ t.Errorf("unexpected error %q from bytes Foo", err)
+ }
+ }
+ {
+ var flagSet flag.FlagSet
+ err := do(&b, &flagSet, []string{"nosuchpackage", "Foo"})
+ if err == nil {
+ // actually present in the user's filesystem
+ } else if !strings.Contains(err.Error(), "no such package") {
+ t.Errorf("unexpected error %q from nosuchpackage Foo", err)
+ }
+ }
+}
+
+// Test the code to look up packages when the first argument starts with "./".
+// Our test case is in effect "cd src/text; doc ./template". This should get
+// text/template but before Issue 23383 was fixed would give html/template.
+func TestDotSlashLookup(t *testing.T) {
+ if testing.Short() {
+ t.Skip("scanning file system takes too long")
+ }
+ maybeSkip(t)
+ where, err := os.Getwd()
+ if err != nil {
+ t.Fatal(err)
+ }
+ defer func() {
+ if err := os.Chdir(where); err != nil {
+ t.Fatal(err)
+ }
+ }()
+ if err := os.Chdir(filepath.Join(buildCtx.GOROOT, "src", "text")); err != nil {
+ t.Fatal(err)
+ }
+ var b bytes.Buffer
+ var flagSet flag.FlagSet
+ err = do(&b, &flagSet, []string{"./template"})
+ if err != nil {
+ t.Errorf("unexpected error %q from ./template", err)
+ }
+ // The output should contain information about the text/template package.
+ const want = `package template // import "text/template"`
+ output := b.String()
+ if !strings.HasPrefix(output, want) {
+ t.Fatalf("wrong package: %.*q...", len(want), output)
+ }
+}
+
+// Test that we don't print spurious package clauses
+// when there should be no output at all. Issue 37969.
+func TestNoPackageClauseWhenNoMatch(t *testing.T) {
+ maybeSkip(t)
+ var b bytes.Buffer
+ var flagSet flag.FlagSet
+ err := do(&b, &flagSet, []string{"template.ZZZ"})
+ // Expect an error.
+ if err == nil {
+ t.Error("expect an error for template.zzz")
+ }
+ // And the output should not contain any package clauses.
+ const dontWant = `package template // import `
+ output := b.String()
+ if strings.Contains(output, dontWant) {
+ t.Fatalf("improper package clause printed:\n%s", output)
+ }
+}
+
// trimTest is one case for TestTrim: trimming prefix from path should
// yield result, with ok reporting whether the prefix was present.
type trimTest struct {
	path   string // input path, slash-separated
	prefix string // directory prefix to strip
	result string // expected trimmed path (or the original on failure)
	ok     bool   // expected "prefix was present" report
}

// trimTests exercises the boundary cases of trim: empty strings, exact
// match, a proper sub-path, a textual-but-not-path prefix (gopherflakes),
// and a completely different prefix.
var trimTests = []trimTest{
	{"", "", "", true},
	{"/usr/gopher", "/usr/gopher", "/usr/gopher", true},
	{"/usr/gopher/bar", "/usr/gopher", "bar", true},
	{"/usr/gopherflakes", "/usr/gopher", "/usr/gopherflakes", false},
	{"/usr/gopher/bar", "/usr/zot", "/usr/gopher/bar", false},
}
+
+func TestTrim(t *testing.T) {
+ for _, test := range trimTests {
+ result, ok := trim(test.path, test.prefix)
+ if ok != test.ok {
+ t.Errorf("%s %s expected %t got %t", test.path, test.prefix, test.ok, ok)
+ continue
+ }
+ if result != test.result {
+ t.Errorf("%s %s expected %q got %q", test.path, test.prefix, test.result, result)
+ continue
+ }
+ }
+}
diff --git a/src/cmd/doc/main.go b/src/cmd/doc/main.go
new file mode 100644
index 0000000..0499c40
--- /dev/null
+++ b/src/cmd/doc/main.go
@@ -0,0 +1,408 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Doc (usually run as go doc) accepts zero, one or two arguments.
+//
+// Zero arguments:
+// go doc
+// Show the documentation for the package in the current directory.
+//
+// One argument:
+// go doc <pkg>
+// go doc <sym>[.<methodOrField>]
+// go doc [<pkg>.]<sym>[.<methodOrField>]
+// go doc [<pkg>.][<sym>.]<methodOrField>
+// The first item in this list that succeeds is the one whose documentation
+// is printed. If there is a symbol but no package, the package in the current
+// directory is chosen. However, if the argument begins with a capital
+// letter it is always assumed to be a symbol in the current directory.
+//
+// Two arguments:
+// go doc <pkg> <sym>[.<methodOrField>]
+//
+// Show the documentation for the package, symbol, and method or field. The
+// first argument must be a full package path. This is similar to the
+// command-line usage for the godoc command.
+//
+// For commands, unless the -cmd flag is present "go doc command"
+// shows only the package-level docs for the package.
+//
+// The -src flag causes doc to print the full source code for the symbol, such
+// as the body of a struct, function or method.
+//
+// The -all flag causes doc to print all documentation for the package and
+// all its visible symbols. The argument must identify a package.
+//
+// For complete documentation, run "go help doc".
+package main
+
+import (
+ "bytes"
+ "flag"
+ "fmt"
+ "go/build"
+ "go/token"
+ "io"
+ "log"
+ "os"
+ "path"
+ "path/filepath"
+ "strings"
+)
+
// Command-line flag values. They are package globals so the rest of the
// program can consult them directly; do rebinds them on every invocation.
var (
	unexported bool // -u flag: show unexported symbols as well as exported
	matchCase  bool // -c flag: symbol matching honors case (paths unaffected)
	showAll    bool // -all flag: show all documentation for the package
	showCmd    bool // -cmd flag: show package docs even if package is a command
	showSrc    bool // -src flag: print full source code for the symbol
	short      bool // -short flag: one-line representation for each symbol
)
+
// usage is a replacement usage function for the flags package.
// It prints the accepted invocation forms to standard error, then the
// flag defaults, and exits with status 2 as usage functions conventionally do.
func usage() {
	const text = "Usage of [go] doc:\n" +
		"\tgo doc\n" +
		"\tgo doc <pkg>\n" +
		"\tgo doc <sym>[.<methodOrField>]\n" +
		"\tgo doc [<pkg>.]<sym>[.<methodOrField>]\n" +
		"\tgo doc [<pkg>.][<sym>.]<methodOrField>\n" +
		"\tgo doc <pkg> <sym>[.<methodOrField>]\n" +
		"For more information run\n" +
		"\tgo help doc\n\n" +
		"Flags:\n"
	fmt.Fprint(os.Stderr, text)
	flag.PrintDefaults()
	os.Exit(2)
}
+
// main configures logging ("doc: " prefix, no timestamps), starts the
// directory scanner (dirsInit, see dirs.go), and runs do once with the
// real command line. Any error from do is fatal.
func main() {
	log.SetFlags(0)
	log.SetPrefix("doc: ")
	dirsInit()
	err := do(os.Stdout, flag.CommandLine, os.Args[1:])
	if err != nil {
		log.Fatal(err)
	}
}
+
// do is the workhorse, broken out of main to make testing easier.
// It binds the command-line flags on flagSet, parses args, and writes the
// requested documentation to writer. The named err result is set by the
// deferred recovery below when pkg code panics with a PackageError.
func do(writer io.Writer, flagSet *flag.FlagSet, args []string) (err error) {
	flagSet.Usage = usage
	// Reset persistent package state; do may run many times in one test
	// process. (The BoolVar calls below also reset these to their defaults,
	// so these two assignments look redundant — kept byte-identical here.)
	unexported = false
	matchCase = false
	flagSet.BoolVar(&unexported, "u", false, "show unexported symbols as well as exported")
	flagSet.BoolVar(&matchCase, "c", false, "symbol matching honors case (paths not affected)")
	flagSet.BoolVar(&showAll, "all", false, "show all documentation for package")
	flagSet.BoolVar(&showCmd, "cmd", false, "show symbols with package docs even if package is a command")
	flagSet.BoolVar(&showSrc, "src", false, "show source code for symbol")
	flagSet.BoolVar(&short, "short", false, "one-line representation for each symbol")
	flagSet.Parse(args)
	var paths []string
	var symbol, method string
	// Loop until something is printed.
	dirs.Reset()
	for i := 0; ; i++ {
		// Each iteration tries the next candidate package (e.g. crypto/rand,
		// then math/rand for "rand.Float64").
		buildPackage, userPath, sym, more := parseArgs(flagSet.Args())
		if i > 0 && !more { // Ignore the "more" bit on the first iteration.
			return failMessage(paths, symbol, method)
		}
		if buildPackage == nil {
			return fmt.Errorf("no such package: %s", userPath)
		}
		symbol, method = parseSymbol(sym)
		pkg := parsePackage(writer, buildPackage, userPath)
		paths = append(paths, pkg.prettyPath())

		// Note: this defer is registered inside the loop, so one flush (plus
		// PackageError recovery) is stacked per attempted package; they all
		// run when do returns.
		defer func() {
			pkg.flush()
			e := recover()
			if e == nil {
				return
			}
			// pkg.Fatalf panics with a PackageError; convert it into the
			// function's error result. Any other panic is re-raised.
			pkgError, ok := e.(PackageError)
			if ok {
				err = pkgError
				return
			}
			panic(e)
		}()

		// The builtin package needs special treatment: its symbols are lower
		// case but we want to see them, always.
		if pkg.build.ImportPath == "builtin" {
			unexported = true
		}

		// We have a package.
		if showAll && symbol == "" {
			pkg.allDoc()
			return
		}

		switch {
		case symbol == "":
			pkg.packageDoc() // The package exists, so we got some output.
			return
		case method == "":
			// Symbol lookup failed: fall through and try the next package.
			if pkg.symbolDoc(symbol) {
				return
			}
		default:
			if pkg.methodDoc(symbol, method) {
				return
			}
			if pkg.fieldDoc(symbol, method) {
				return
			}
		}
	}
}
+
// failMessage creates a nicely formatted error message when there is no
// result to show. paths lists every package that was inspected; symbol and
// method identify what the user asked for (method may be empty).
func failMessage(paths []string, symbol, method string) error {
	// Pluralize "package" when more than one was searched, then append the
	// comma-separated list of their pretty paths.
	suffix := " " + strings.Join(paths, ", ")
	if len(paths) > 1 {
		suffix = "s" + suffix
	}
	if method == "" {
		return fmt.Errorf("no symbol %s in package%s", symbol, suffix)
	}
	return fmt.Errorf("no method or field %s.%s in package%s", symbol, method, suffix)
}
+
// parseArgs analyzes the arguments (if any) and returns the package
// it represents, the part of the argument the user used to identify
// the path (or "" if it's the current package) and the symbol
// (possibly with a .method) within that package.
// parseSymbol is used to analyze the symbol itself.
// The boolean final argument reports whether it is possible that
// there may be more directories worth looking at. It will only
// be true if the package path is a partial match for some directory
// and there may be more matches. For example, if the argument
// is rand.Float64, we must scan both crypto/rand and math/rand
// to find the symbol, and the first call will return crypto/rand, true.
func parseArgs(args []string) (pkg *build.Package, path, symbol string, more bool) {
	wd, err := os.Getwd()
	if err != nil {
		log.Fatal(err)
	}
	if len(args) == 0 {
		// Easy: current directory.
		return importDir(wd), "", "", false
	}
	arg := args[0]
	// We have an argument. If it is a directory name beginning with . or ..,
	// use the absolute path name. This discriminates "./errors" from "errors"
	// if the current directory contains a non-standard errors package.
	if isDotSlash(arg) {
		arg = filepath.Join(wd, arg)
	}
	switch len(args) {
	default:
		usage()
	case 1:
		// Done below.
	case 2:
		// Two-argument form: args[0] is the package, args[1] the symbol.
		// Package must be findable and importable.
		pkg, err := build.Import(args[0], wd, build.ImportComment)
		if err == nil {
			return pkg, args[0], args[1], false
		}
		// Not a full import path; try it as a tail of a scanned directory
		// (e.g. "binary" for encoding/binary).
		for {
			packagePath, ok := findNextPackage(arg)
			if !ok {
				break
			}
			if pkg, err := build.ImportDir(packagePath, build.ImportComment); err == nil {
				return pkg, arg, args[1], true
			}
		}
		return nil, args[0], args[1], false
	}
	// Usual case: one argument.
	// If it contains slashes, it begins with either a package path
	// or an absolute directory.
	// First, is it a complete package path as it is? If so, we are done.
	// This avoids confusion over package paths that have other
	// package paths as their prefix.
	var importErr error
	if filepath.IsAbs(arg) {
		pkg, importErr = build.ImportDir(arg, build.ImportComment)
		if importErr == nil {
			return pkg, arg, "", false
		}
	} else {
		pkg, importErr = build.Import(arg, wd, build.ImportComment)
		if importErr == nil {
			return pkg, arg, "", false
		}
	}
	// Another disambiguator: If the argument starts with an upper
	// case letter, it can only be a symbol in the current directory.
	// Kills the problem caused by case-insensitive file systems
	// matching an upper case name as a package name.
	if !strings.ContainsAny(arg, `/\`) && token.IsExported(arg) {
		pkg, err := build.ImportDir(".", build.ImportComment)
		if err == nil {
			return pkg, "", arg, false
		}
	}
	// If it has a slash, it must be a package path but there is a symbol.
	// It's the last package path we care about.
	slash := strings.LastIndex(arg, "/")
	// There may be periods in the package path before or after the slash
	// and between a symbol and method.
	// Split the string at various periods to see what we find.
	// In general there may be ambiguities but this should almost always
	// work.
	var period int
	// slash+1: if there's no slash, the value is -1 and start is 0; otherwise
	// start is the byte after the slash.
	for start := slash + 1; start < len(arg); start = period + 1 {
		period = strings.Index(arg[start:], ".")
		symbol := ""
		if period < 0 {
			period = len(arg)
		} else {
			period += start
			symbol = arg[period+1:]
		}
		// Have we identified a package already?
		pkg, err := build.Import(arg[0:period], wd, build.ImportComment)
		if err == nil {
			return pkg, arg[0:period], symbol, false
		}
		// See if we have the basename or tail of a package, as in json for encoding/json
		// or ivy/value for robpike.io/ivy/value.
		pkgName := arg[:period]
		for {
			path, ok := findNextPackage(pkgName)
			if !ok {
				break
			}
			if pkg, err = build.ImportDir(path, build.ImportComment); err == nil {
				return pkg, arg[0:period], symbol, true
			}
		}
		dirs.Reset() // Next iteration of for loop must scan all the directories again.
	}
	// If it has a slash, we've failed.
	if slash >= 0 {
		// build.Import should always include the path in its error message,
		// and we should avoid repeating it. Unfortunately, build.Import doesn't
		// return a structured error. That can't easily be fixed, since it
		// invokes 'go list' and returns the error text from the loaded package.
		// TODO(golang.org/issue/34750): load using golang.org/x/tools/go/packages
		// instead of go/build.
		// NOTE(review): arg[:period] relies on period keeping its value from
		// the final loop iteration; if the loop body never ran (e.g. arg ends
		// in "/"), period is its zero value and arg[:0] is "" — confirm this
		// edge is intended.
		importErrStr := importErr.Error()
		if strings.Contains(importErrStr, arg[:period]) {
			log.Fatal(importErrStr)
		} else {
			log.Fatalf("no such package %s: %s", arg[:period], importErrStr)
		}
	}
	// Guess it's a symbol in the current directory.
	return importDir(wd), "", arg, false
}
+
// dotPaths lists all the dotted paths legal on Unix-like and
// Windows-like file systems. We check them all, as the chance
// of error is minute and even on Windows people will use ./
// sometimes.
var dotPaths = []string{
	`./`,
	`../`,
	`.\`,
	`..\`,
}

// isDotSlash reports whether the path begins with a reference
// to the local . or .. directory.
func isDotSlash(arg string) bool {
	// Bare "." and ".." count even without a trailing separator.
	switch arg {
	case ".", "..":
		return true
	}
	for _, prefix := range dotPaths {
		if strings.HasPrefix(arg, prefix) {
			return true
		}
	}
	return false
}
+
// importDir is just an error-catching wrapper for build.ImportDir.
// Any failure to import the directory is fatal: doc cannot proceed
// without a package, so the process exits via log.Fatal.
func importDir(dir string) *build.Package {
	pkg, err := build.ImportDir(dir, build.ImportComment)
	if err != nil {
		log.Fatal(err)
	}
	return pkg
}
+
+// parseSymbol breaks str apart into a symbol and method.
+// Both may be missing or the method may be missing.
+// If present, each must be a valid Go identifier.
+func parseSymbol(str string) (symbol, method string) {
+ if str == "" {
+ return
+ }
+ elem := strings.Split(str, ".")
+ switch len(elem) {
+ case 1:
+ case 2:
+ method = elem[1]
+ default:
+ log.Printf("too many periods in symbol specification")
+ usage()
+ }
+ symbol = elem[0]
+ return
+}
+
// isExported reports whether the name is an exported identifier.
// If the unexported flag (-u) is true, isExported returns true because
// it means that we treat the name as if it is exported.
func isExported(name string) bool {
	return unexported || token.IsExported(name)
}
+
// findNextPackage returns the next full file name path that matches the
// (perhaps partial) package path pkg. The boolean reports if any match was found.
// Successive calls continue the scan from where the previous call stopped;
// callers use dirs.Reset to start over.
func findNextPackage(pkg string) (string, bool) {
	if filepath.IsAbs(pkg) {
		// An absolute path can only match itself, and only once.
		// NOTE(review): dirs.offset doubles here as an "already returned"
		// flag (-1 after the first call) — confirm against dirs.go.
		if dirs.offset == 0 {
			dirs.offset = -1
			return pkg, true
		}
		return "", false
	}
	if pkg == "" || token.IsExported(pkg) { // Upper case symbol cannot be a package name.
		return "", false
	}
	pkg = path.Clean(pkg)
	pkgSuffix := "/" + pkg
	// Scan the directory list for an import path that is, or ends in, pkg.
	for {
		d, ok := dirs.Next()
		if !ok {
			return "", false
		}
		if d.importPath == pkg || strings.HasSuffix(d.importPath, pkgSuffix) {
			return d.dir, true
		}
	}
}
+
// buildCtx is the build context (GOROOT, GOPATH, build tags) consulted
// everywhere this program loads or locates packages.
var buildCtx = build.Default

// splitGopath splits $GOPATH into a list of roots.
func splitGopath() []string {
	return filepath.SplitList(buildCtx.GOPATH)
}
diff --git a/src/cmd/doc/pkg.go b/src/cmd/doc/pkg.go
new file mode 100644
index 0000000..c2e06eb
--- /dev/null
+++ b/src/cmd/doc/pkg.go
@@ -0,0 +1,1085 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package main
+
+import (
+ "bufio"
+ "bytes"
+ "fmt"
+ "go/ast"
+ "go/build"
+ "go/doc"
+ "go/format"
+ "go/parser"
+ "go/printer"
+ "go/token"
+ "io"
+ "io/fs"
+ "log"
+ "path/filepath"
+ "strings"
+ "unicode"
+ "unicode/utf8"
+)
+
// Output-layout constants shared by the formatting code below.
const (
	punchedCardWidth = 80 // These things just won't leave us alone.
	indentedWidth    = punchedCardWidth - len(indent)
	// NOTE(review): indent appears here as a single space; upstream cmd/doc
	// indents with four spaces, so this rendering may have collapsed the
	// whitespace — verify before relying on it.
	indent = " "
)
+
// Package collects everything doc knows about one package: the parsed
// AST, the computed go/doc model, and the buffer its output accumulates in.
type Package struct {
	writer      io.Writer // Destination for output.
	name        string    // Package name, json for encoding/json.
	userPath    string    // String the user used to find this package.
	pkg         *ast.Package // Parsed package.
	file        *ast.File // Merged from all files in the package
	doc         *doc.Package // go/doc documentation model built by parsePackage.
	build       *build.Package // The build.Package this was loaded from.
	typedValue  map[*doc.Value]bool // Consts and vars related to types.
	constructor map[*doc.Func]bool  // Constructors.
	fs          *token.FileSet // Needed for printing.
	buf         pkgBuffer // Accumulated output; written out by flush.
}
+
// pkgBuffer is a wrapper for bytes.Buffer that prints a package clause the
// first time Write is called.
type pkgBuffer struct {
	pkg     *Package
	printed bool // Prevent repeated package clauses.
	bytes.Buffer
}

// Write emits the package clause (at most once) before the first byte of
// real output, then forwards to the embedded bytes.Buffer.
func (pb *pkgBuffer) Write(p []byte) (int, error) {
	pb.packageClause()
	return pb.Buffer.Write(p)
}

// packageClause prints the package clause exactly once, skipping it for
// commands (package main) unless -cmd was given.
func (pb *pkgBuffer) packageClause() {
	if !pb.printed {
		pb.printed = true
		// Only show package clause for commands if requested explicitly.
		if pb.pkg.pkg.Name != "main" || showCmd {
			pb.pkg.packageClause()
		}
	}
}
+
// PackageError is the value pkg.Fatalf panics with; do recovers it and
// returns it as an ordinary error.
type PackageError string

// Error implements the error interface.
func (p PackageError) Error() string {
	return string(p)
}
+
+// prettyPath returns a version of the package path that is suitable for an
+// error message. It obeys the import comment if present. Also, since
+// pkg.build.ImportPath is sometimes the unhelpful "" or ".", it looks for a
+// directory name in GOROOT or GOPATH if that happens.
+func (pkg *Package) prettyPath() string {
+ path := pkg.build.ImportComment
+ if path == "" {
+ path = pkg.build.ImportPath
+ }
+ if path != "." && path != "" {
+ return path
+ }
+ // Convert the source directory into a more useful path.
+ // Also convert everything to slash-separated paths for uniform handling.
+ path = filepath.Clean(filepath.ToSlash(pkg.build.Dir))
+ // Can we find a decent prefix?
+ goroot := filepath.Join(buildCtx.GOROOT, "src")
+ if p, ok := trim(path, filepath.ToSlash(goroot)); ok {
+ return p
+ }
+ for _, gopath := range splitGopath() {
+ if p, ok := trim(path, filepath.ToSlash(gopath)); ok {
+ return p
+ }
+ }
+ return path
+}
+
// trim trims the directory prefix from the path, paying attention
// to the path separator. If they are the same string or the prefix
// is not present the original is returned. The boolean reports whether
// the prefix is present. That path and prefix have slashes for separators.
func trim(path, prefix string) (string, bool) {
	switch {
	case !strings.HasPrefix(path, prefix):
		// No textual prefix at all.
		return path, false
	case len(path) == len(prefix):
		// HasPrefix plus equal length means the strings are identical.
		return path, true
	case path[len(prefix)] == '/':
		// Proper path prefix: drop it along with the separator.
		return path[len(prefix)+1:], true
	default:
		// Textual prefix but not a path prefix (e.g. /usr/gopherflakes
		// under /usr/gopher).
		return path, false
	}
}
+
// pkg.Fatalf is like log.Fatalf, but panics so it can be recovered in the
// main do function, so it doesn't cause an exit. Allows testing to work
// without running a subprocess. The log prefix will be added when
// logged in main; it is not added here.
// The receiver is unused, so the method is safe on any *Package value.
func (pkg *Package) Fatalf(format string, args ...interface{}) {
	panic(PackageError(fmt.Sprintf(format, args...)))
}
+
// parsePackage turns the build package we found into a parsed package
// we can then use to generate documentation. Parse failures, an empty
// directory, and multiple packages in one directory are all fatal.
func parsePackage(writer io.Writer, pkg *build.Package, userPath string) *Package {
	// include tells parser.ParseDir which files to include.
	// That means the file must be in the build package's GoFiles or CgoFiles
	// list only (no tag-ignored files, tests, swig or other non-Go files).
	include := func(info fs.FileInfo) bool {
		for _, name := range pkg.GoFiles {
			if name == info.Name() {
				return true
			}
		}
		for _, name := range pkg.CgoFiles {
			if name == info.Name() {
				return true
			}
		}
		return false
	}
	fset := token.NewFileSet()
	pkgs, err := parser.ParseDir(fset, pkg.Dir, include, parser.ParseComments)
	if err != nil {
		log.Fatal(err)
	}
	// Make sure they are all in one package.
	if len(pkgs) == 0 {
		log.Fatalf("no source-code package in directory %s", pkg.Dir)
	}
	if len(pkgs) > 1 {
		log.Fatalf("multiple packages in directory %s", pkg.Dir)
	}
	astPkg := pkgs[pkg.Name]

	// TODO: go/doc does not include typed constants in the constants
	// list, which is what we want. For instance, time.Sunday is of type
	// time.Weekday, so it is defined in the type but not in the
	// Consts list for the package. This prevents
	//	go doc time.Sunday
	// from finding the symbol. Work around this for now, but we
	// should fix it in go/doc.
	// A similar story applies to factory functions.
	mode := doc.AllDecls
	if showSrc {
		mode |= doc.PreserveAST // See comment for Package.emit.
	}
	docPkg := doc.New(astPkg, pkg.ImportPath, mode)
	typedValue := make(map[*doc.Value]bool)
	constructor := make(map[*doc.Func]bool)
	// Hoist each type's consts, vars, and factory functions into the
	// package-level lists so symbol lookup can find them, remembering
	// which ones came from exported types.
	for _, typ := range docPkg.Types {
		docPkg.Consts = append(docPkg.Consts, typ.Consts...)
		docPkg.Vars = append(docPkg.Vars, typ.Vars...)
		docPkg.Funcs = append(docPkg.Funcs, typ.Funcs...)
		if isExported(typ.Name) {
			for _, value := range typ.Consts {
				typedValue[value] = true
			}
			for _, value := range typ.Vars {
				typedValue[value] = true
			}
			for _, fun := range typ.Funcs {
				// We don't count it as a constructor bound to the type
				// if the type itself is not exported.
				constructor[fun] = true
			}
		}
	}

	p := &Package{
		writer:      writer,
		name:        pkg.Name,
		userPath:    userPath,
		pkg:         astPkg,
		file:        ast.MergePackageFiles(astPkg, 0),
		doc:         docPkg,
		typedValue:  typedValue,
		constructor: constructor,
		build:       pkg,
		fs:          fset,
	}
	// The buffer needs a back-pointer so it can print the package clause
	// lazily on first write.
	p.buf.pkg = p
	return p
}
+
// Printf formats into the package's output buffer; writing through the
// buffer triggers the one-time package clause.
func (pkg *Package) Printf(format string, args ...interface{}) {
	fmt.Fprintf(&pkg.buf, format, args...)
}
+
// flush writes the accumulated buffer to the package's writer and resets
// the buffer. A write failure is fatal.
func (pkg *Package) flush() {
	_, err := pkg.writer.Write(pkg.buf.Bytes())
	if err != nil {
		log.Fatal(err)
	}
	pkg.buf.Reset() // Not needed, but it's a flush.
}
+
var newlineBytes = []byte("\n\n") // We never ask for more than 2.

// newlines guarantees there are n newlines at the end of the buffer.
// n must be at most 2 (len(newlineBytes)); callers only ever pass 1 or 2.
func (pkg *Package) newlines(n int) {
	for !bytes.HasSuffix(pkg.buf.Bytes(), newlineBytes[:n]) {
		pkg.buf.WriteRune('\n')
	}
}
+
// emit prints the node. If showSrc is true, it ignores the provided comment,
// assuming the comment is in the node itself. Otherwise, the go/doc package
// clears the stuff we don't want to print anyway. It's a bit of a magic trick.
// A nil node is silently ignored; any formatting error is fatal.
func (pkg *Package) emit(comment string, node ast.Node) {
	if node != nil {
		var arg interface{} = node
		if showSrc {
			// Need an extra little dance to get internal comments to appear.
			arg = &printer.CommentedNode{
				Node:     node,
				Comments: pkg.file.Comments,
			}
		}
		err := format.Node(&pkg.buf, pkg.fs, arg)
		if err != nil {
			log.Fatal(err)
		}
		if comment != "" && !showSrc {
			// Print the doc comment indented beneath the declaration.
			pkg.newlines(1)
			doc.ToText(&pkg.buf, comment, indent, indent+indent, indentedWidth)
			pkg.newlines(2) // Blank line after comment to separate from next item.
		} else {
			pkg.newlines(1)
		}
	}
}
+
// oneLineNode returns a one-line summary of the given input node.
// Traversal is capped at a fixed depth so deeply nested types degrade
// to "..." instead of producing unbounded output.
func (pkg *Package) oneLineNode(node ast.Node) string {
	const maxDepth = 10
	return pkg.oneLineNodeDepth(node, maxDepth)
}
+
// oneLineNodeDepth returns a one-line summary of the given input node.
// The depth specifies the maximum depth when traversing the AST; when it
// reaches zero the summary collapses to "...".
func (pkg *Package) oneLineNodeDepth(node ast.Node, depth int) string {
	const dotDotDot = "..."
	if depth == 0 {
		return dotDotDot
	}
	depth--

	switch n := node.(type) {
	case nil:
		return ""

	case *ast.GenDecl:
		// Formats const and var declarations.
		trailer := ""
		if len(n.Specs) > 1 {
			trailer = " " + dotDotDot
		}

		// Find the first relevant spec.
		typ := ""
		for i, spec := range n.Specs {
			valueSpec := spec.(*ast.ValueSpec) // Must succeed; we can't mix types in one GenDecl.

			// The type name may carry over from a previous specification in the
			// case of constants and iota.
			if valueSpec.Type != nil {
				typ = fmt.Sprintf(" %s", pkg.oneLineNodeDepth(valueSpec.Type, depth))
			} else if len(valueSpec.Values) > 0 {
				typ = ""
			}

			if !isExported(valueSpec.Names[0].Name) {
				continue
			}
			val := ""
			if i < len(valueSpec.Values) && valueSpec.Values[i] != nil {
				val = fmt.Sprintf(" = %s", pkg.oneLineNodeDepth(valueSpec.Values[i], depth))
			}
			return fmt.Sprintf("%s %s%s%s%s", n.Tok, valueSpec.Names[0], typ, val, trailer)
		}
		return ""

	case *ast.FuncDecl:
		// Formats func declarations.
		name := n.Name.Name
		recv := pkg.oneLineNodeDepth(n.Recv, depth)
		if len(recv) > 0 {
			recv = "(" + recv + ") "
		}
		fnc := pkg.oneLineNodeDepth(n.Type, depth)
		// Strip the leading "func" from the type so the keyword appears once.
		if strings.Index(fnc, "func") == 0 {
			fnc = fnc[4:]
		}
		return fmt.Sprintf("func %s%s%s", recv, name, fnc)

	case *ast.TypeSpec:
		sep := " "
		if n.Assign.IsValid() {
			// Type alias: "type T = U".
			sep = " = "
		}
		return fmt.Sprintf("type %s%s%s", n.Name.Name, sep, pkg.oneLineNodeDepth(n.Type, depth))

	case *ast.FuncType:
		var params []string
		if n.Params != nil {
			for _, field := range n.Params.List {
				params = append(params, pkg.oneLineField(field, depth))
			}
		}
		// Result lists need parentheses when there is more than one result
		// or any result is named.
		needParens := false
		var results []string
		if n.Results != nil {
			needParens = needParens || len(n.Results.List) > 1
			for _, field := range n.Results.List {
				needParens = needParens || len(field.Names) > 0
				results = append(results, pkg.oneLineField(field, depth))
			}
		}

		param := joinStrings(params)
		if len(results) == 0 {
			return fmt.Sprintf("func(%s)", param)
		}
		result := joinStrings(results)
		if !needParens {
			return fmt.Sprintf("func(%s) %s", param, result)
		}
		return fmt.Sprintf("func(%s) (%s)", param, result)

	case *ast.StructType:
		if n.Fields == nil || len(n.Fields.List) == 0 {
			return "struct{}"
		}
		return "struct{ ... }"

	case *ast.InterfaceType:
		if n.Methods == nil || len(n.Methods.List) == 0 {
			return "interface{}"
		}
		return "interface{ ... }"

	case *ast.FieldList:
		if n == nil || len(n.List) == 0 {
			return ""
		}
		if len(n.List) == 1 {
			return pkg.oneLineField(n.List[0], depth)
		}
		return dotDotDot

	case *ast.FuncLit:
		return pkg.oneLineNodeDepth(n.Type, depth) + " { ... }"

	case *ast.CompositeLit:
		typ := pkg.oneLineNodeDepth(n.Type, depth)
		if len(n.Elts) == 0 {
			return fmt.Sprintf("%s{}", typ)
		}
		return fmt.Sprintf("%s{ %s }", typ, dotDotDot)

	case *ast.ArrayType:
		length := pkg.oneLineNodeDepth(n.Len, depth)
		element := pkg.oneLineNodeDepth(n.Elt, depth)
		return fmt.Sprintf("[%s]%s", length, element)

	case *ast.MapType:
		key := pkg.oneLineNodeDepth(n.Key, depth)
		value := pkg.oneLineNodeDepth(n.Value, depth)
		return fmt.Sprintf("map[%s]%s", key, value)

	case *ast.CallExpr:
		fnc := pkg.oneLineNodeDepth(n.Fun, depth)
		var args []string
		for _, arg := range n.Args {
			args = append(args, pkg.oneLineNodeDepth(arg, depth))
		}
		return fmt.Sprintf("%s(%s)", fnc, joinStrings(args))

	case *ast.UnaryExpr:
		return fmt.Sprintf("%s%s", n.Op, pkg.oneLineNodeDepth(n.X, depth))

	case *ast.Ident:
		return n.Name

	default:
		// As a fallback, use default formatter for all unknown node types.
		buf := new(bytes.Buffer)
		format.Node(buf, pkg.fs, node)
		s := buf.String()
		if strings.Contains(s, "\n") {
			return dotDotDot
		}
		return s
	}
}
+
+// oneLineField returns a one-line summary of the field.
+func (pkg *Package) oneLineField(field *ast.Field, depth int) string {
+ var names []string
+ for _, name := range field.Names {
+ names = append(names, name.Name)
+ }
+ if len(names) == 0 {
+ return pkg.oneLineNodeDepth(field.Type, depth)
+ }
+ return joinStrings(names) + " " + pkg.oneLineNodeDepth(field.Type, depth)
+}
+
+// joinStrings formats the input as a comma-separated list,
+// but truncates the list at some reasonable length if necessary.
+func joinStrings(ss []string) string {
+ var n int
+ for i, s := range ss {
+ n += len(s) + len(", ")
+ if n > punchedCardWidth {
+ ss = append(ss[:i:i], "...")
+ break
+ }
+ }
+ return strings.Join(ss, ", ")
+}
+
+// allDoc prints all the docs for the package.
+func (pkg *Package) allDoc() {
+ pkg.Printf("") // Trigger the package clause; we know the package exists.
+ doc.ToText(&pkg.buf, pkg.doc.Doc, "", indent, indentedWidth)
+ pkg.newlines(1)
+
+ printed := make(map[*ast.GenDecl]bool)
+
+ hdr := ""
+ printHdr := func(s string) {
+ if hdr != s {
+ pkg.Printf("\n%s\n\n", s)
+ hdr = s
+ }
+ }
+
+ // Constants.
+ for _, value := range pkg.doc.Consts {
+ // Constants and variables come in groups, and valueDoc prints
+ // all the items in the group. We only need to find one exported symbol.
+ for _, name := range value.Names {
+ if isExported(name) && !pkg.typedValue[value] {
+ printHdr("CONSTANTS")
+ pkg.valueDoc(value, printed)
+ break
+ }
+ }
+ }
+
+ // Variables.
+ for _, value := range pkg.doc.Vars {
+ // Constants and variables come in groups, and valueDoc prints
+ // all the items in the group. We only need to find one exported symbol.
+ for _, name := range value.Names {
+ if isExported(name) && !pkg.typedValue[value] {
+ printHdr("VARIABLES")
+ pkg.valueDoc(value, printed)
+ break
+ }
+ }
+ }
+
+ // Functions.
+ for _, fun := range pkg.doc.Funcs {
+ if isExported(fun.Name) && !pkg.constructor[fun] {
+ printHdr("FUNCTIONS")
+ pkg.emit(fun.Doc, fun.Decl)
+ }
+ }
+
+ // Types.
+ for _, typ := range pkg.doc.Types {
+ if isExported(typ.Name) {
+ printHdr("TYPES")
+ pkg.typeDoc(typ)
+ }
+ }
+}
+
+// packageDoc prints the docs for the package (package doc plus one-liners of the rest).
+func (pkg *Package) packageDoc() {
+ pkg.Printf("") // Trigger the package clause; we know the package exists.
+ if !short {
+ doc.ToText(&pkg.buf, pkg.doc.Doc, "", indent, indentedWidth)
+ pkg.newlines(1)
+ }
+
+ if pkg.pkg.Name == "main" && !showCmd {
+ // Show only package docs for commands.
+ return
+ }
+
+ if !short {
+ pkg.newlines(2) // Guarantee blank line before the components.
+ }
+
+ pkg.valueSummary(pkg.doc.Consts, false)
+ pkg.valueSummary(pkg.doc.Vars, false)
+ pkg.funcSummary(pkg.doc.Funcs, false)
+ pkg.typeSummary()
+ if !short {
+ pkg.bugs()
+ }
+}
+
+// packageClause prints the package clause.
+func (pkg *Package) packageClause() {
+ if short {
+ return
+ }
+ importPath := pkg.build.ImportComment
+ if importPath == "" {
+ importPath = pkg.build.ImportPath
+ }
+
+ // If we're using modules, the import path derived from module code locations wins.
+ // If we did a file system scan, we knew the import path when we found the directory.
+ // But if we started with a directory name, we never knew the import path.
+ // Either way, we don't know it now, and it's cheap to (re)compute it.
+ if usingModules {
+ for _, root := range codeRoots() {
+ if pkg.build.Dir == root.dir {
+ importPath = root.importPath
+ break
+ }
+ if strings.HasPrefix(pkg.build.Dir, root.dir+string(filepath.Separator)) {
+ suffix := filepath.ToSlash(pkg.build.Dir[len(root.dir)+1:])
+ if root.importPath == "" {
+ importPath = suffix
+ } else {
+ importPath = root.importPath + "/" + suffix
+ }
+ break
+ }
+ }
+ }
+
+ pkg.Printf("package %s // import %q\n\n", pkg.name, importPath)
+ if !usingModules && importPath != pkg.build.ImportPath {
+ pkg.Printf("WARNING: package source is installed in %q\n", pkg.build.ImportPath)
+ }
+}
+
+// valueSummary prints a one-line summary for each set of values and constants.
+// If all the types in a constant or variable declaration belong to the same
+// type they can be printed by typeSummary, and so can be suppressed here.
+func (pkg *Package) valueSummary(values []*doc.Value, showGrouped bool) {
+ var isGrouped map[*doc.Value]bool
+ if !showGrouped {
+ isGrouped = make(map[*doc.Value]bool)
+ for _, typ := range pkg.doc.Types {
+ if !isExported(typ.Name) {
+ continue
+ }
+ for _, c := range typ.Consts {
+ isGrouped[c] = true
+ }
+ for _, v := range typ.Vars {
+ isGrouped[v] = true
+ }
+ }
+ }
+
+ for _, value := range values {
+ if !isGrouped[value] {
+ if decl := pkg.oneLineNode(value.Decl); decl != "" {
+ pkg.Printf("%s\n", decl)
+ }
+ }
+ }
+}
+
+// funcSummary prints a one-line summary for each function. Constructors
+// are printed by typeSummary, below, and so can be suppressed here.
+func (pkg *Package) funcSummary(funcs []*doc.Func, showConstructors bool) {
+ for _, fun := range funcs {
+ // Exported functions only. The go/doc package does not include methods here.
+ if isExported(fun.Name) {
+ if showConstructors || !pkg.constructor[fun] {
+ pkg.Printf("%s\n", pkg.oneLineNode(fun.Decl))
+ }
+ }
+ }
+}
+
+// typeSummary prints a one-line summary for each type, followed by its constructors.
+func (pkg *Package) typeSummary() {
+ for _, typ := range pkg.doc.Types {
+ for _, spec := range typ.Decl.Specs {
+ typeSpec := spec.(*ast.TypeSpec) // Must succeed.
+ if isExported(typeSpec.Name.Name) {
+ pkg.Printf("%s\n", pkg.oneLineNode(typeSpec))
+ // Now print the consts, vars, and constructors.
+ for _, c := range typ.Consts {
+ if decl := pkg.oneLineNode(c.Decl); decl != "" {
+ pkg.Printf(indent+"%s\n", decl)
+ }
+ }
+ for _, v := range typ.Vars {
+ if decl := pkg.oneLineNode(v.Decl); decl != "" {
+ pkg.Printf(indent+"%s\n", decl)
+ }
+ }
+ for _, constructor := range typ.Funcs {
+ if isExported(constructor.Name) {
+ pkg.Printf(indent+"%s\n", pkg.oneLineNode(constructor.Decl))
+ }
+ }
+ }
+ }
+ }
+}
+
+// bugs prints the BUGS information for the package.
+// TODO: Provide access to TODOs and NOTEs as well (very noisy so off by default)?
+func (pkg *Package) bugs() {
+ if pkg.doc.Notes["BUG"] == nil {
+ return
+ }
+ pkg.Printf("\n")
+ for _, note := range pkg.doc.Notes["BUG"] {
+ pkg.Printf("%s: %v\n", "BUG", note.Body)
+ }
+}
+
+// findValues finds the doc.Values that describe the symbol.
+func (pkg *Package) findValues(symbol string, docValues []*doc.Value) (values []*doc.Value) {
+ for _, value := range docValues {
+ for _, name := range value.Names {
+ if match(symbol, name) {
+ values = append(values, value)
+ }
+ }
+ }
+ return
+}
+
+// findFuncs finds the doc.Funcs that describes the symbol.
+func (pkg *Package) findFuncs(symbol string) (funcs []*doc.Func) {
+ for _, fun := range pkg.doc.Funcs {
+ if match(symbol, fun.Name) {
+ funcs = append(funcs, fun)
+ }
+ }
+ return
+}
+
+// findTypes finds the doc.Types that describes the symbol.
+// If symbol is empty, it finds all exported types.
+func (pkg *Package) findTypes(symbol string) (types []*doc.Type) {
+ for _, typ := range pkg.doc.Types {
+ if symbol == "" && isExported(typ.Name) || match(symbol, typ.Name) {
+ types = append(types, typ)
+ }
+ }
+ return
+}
+
+// findTypeSpec returns the ast.TypeSpec within the declaration that defines the symbol.
+// The name must match exactly.
+func (pkg *Package) findTypeSpec(decl *ast.GenDecl, symbol string) *ast.TypeSpec {
+ for _, spec := range decl.Specs {
+ typeSpec := spec.(*ast.TypeSpec) // Must succeed.
+ if symbol == typeSpec.Name.Name {
+ return typeSpec
+ }
+ }
+ return nil
+}
+
+// symbolDoc prints the docs for symbol. There may be multiple matches.
+// If symbol matches a type, output includes its methods factories and associated constants.
+// If there is no top-level symbol, symbolDoc looks for methods that match.
+func (pkg *Package) symbolDoc(symbol string) bool {
+ found := false
+ // Functions.
+ for _, fun := range pkg.findFuncs(symbol) {
+ // Symbol is a function.
+ decl := fun.Decl
+ pkg.emit(fun.Doc, decl)
+ found = true
+ }
+ // Constants and variables behave the same.
+ values := pkg.findValues(symbol, pkg.doc.Consts)
+ values = append(values, pkg.findValues(symbol, pkg.doc.Vars)...)
+ // A declaration like
+ // const ( c = 1; C = 2 )
+ // could be printed twice if the -u flag is set, as it matches twice.
+ // So we remember which declarations we've printed to avoid duplication.
+ printed := make(map[*ast.GenDecl]bool)
+ for _, value := range values {
+ pkg.valueDoc(value, printed)
+ found = true
+ }
+ // Types.
+ for _, typ := range pkg.findTypes(symbol) {
+ pkg.typeDoc(typ)
+ found = true
+ }
+ if !found {
+ // See if there are methods.
+ if !pkg.printMethodDoc("", symbol) {
+ return false
+ }
+ }
+ return true
+}
+
+// valueDoc prints the docs for a constant or variable.
+func (pkg *Package) valueDoc(value *doc.Value, printed map[*ast.GenDecl]bool) {
+ if printed[value.Decl] {
+ return
+ }
+ // Print each spec only if there is at least one exported symbol in it.
+ // (See issue 11008.)
+ // TODO: Should we elide unexported symbols from a single spec?
+ // It's an unlikely scenario, probably not worth the trouble.
+ // TODO: Would be nice if go/doc did this for us.
+ specs := make([]ast.Spec, 0, len(value.Decl.Specs))
+ var typ ast.Expr
+ for _, spec := range value.Decl.Specs {
+ vspec := spec.(*ast.ValueSpec)
+
+ // The type name may carry over from a previous specification in the
+ // case of constants and iota.
+ if vspec.Type != nil {
+ typ = vspec.Type
+ }
+
+ for _, ident := range vspec.Names {
+ if showSrc || isExported(ident.Name) {
+ if vspec.Type == nil && vspec.Values == nil && typ != nil {
+ // This a standalone identifier, as in the case of iota usage.
+ // Thus, assume the type comes from the previous type.
+ vspec.Type = &ast.Ident{
+ Name: pkg.oneLineNode(typ),
+ NamePos: vspec.End() - 1,
+ }
+ }
+
+ specs = append(specs, vspec)
+ typ = nil // Only inject type on first exported identifier
+ break
+ }
+ }
+ }
+ if len(specs) == 0 {
+ return
+ }
+ value.Decl.Specs = specs
+ pkg.emit(value.Doc, value.Decl)
+ printed[value.Decl] = true
+}
+
+// typeDoc prints the docs for a type, including constructors and other items
+// related to it.
+func (pkg *Package) typeDoc(typ *doc.Type) {
+ decl := typ.Decl
+ spec := pkg.findTypeSpec(decl, typ.Name)
+ trimUnexportedElems(spec)
+ // If there are multiple types defined, reduce to just this one.
+ if len(decl.Specs) > 1 {
+ decl.Specs = []ast.Spec{spec}
+ }
+ pkg.emit(typ.Doc, decl)
+ pkg.newlines(2)
+ // Show associated methods, constants, etc.
+ if showAll {
+ printed := make(map[*ast.GenDecl]bool)
+ // We can use append here to print consts, then vars. Ditto for funcs and methods.
+ values := typ.Consts
+ values = append(values, typ.Vars...)
+ for _, value := range values {
+ for _, name := range value.Names {
+ if isExported(name) {
+ pkg.valueDoc(value, printed)
+ break
+ }
+ }
+ }
+ funcs := typ.Funcs
+ funcs = append(funcs, typ.Methods...)
+ for _, fun := range funcs {
+ if isExported(fun.Name) {
+ pkg.emit(fun.Doc, fun.Decl)
+ if fun.Doc == "" {
+ pkg.newlines(2)
+ }
+ }
+ }
+ } else {
+ pkg.valueSummary(typ.Consts, true)
+ pkg.valueSummary(typ.Vars, true)
+ pkg.funcSummary(typ.Funcs, true)
+ pkg.funcSummary(typ.Methods, true)
+ }
+}
+
+// trimUnexportedElems modifies spec in place to elide unexported fields from
+// structs and methods from interfaces (unless the unexported flag is set or we
+// are asked to show the original source).
+func trimUnexportedElems(spec *ast.TypeSpec) {
+ if unexported || showSrc {
+ return
+ }
+ switch typ := spec.Type.(type) {
+ case *ast.StructType:
+ typ.Fields = trimUnexportedFields(typ.Fields, false)
+ case *ast.InterfaceType:
+ typ.Methods = trimUnexportedFields(typ.Methods, true)
+ }
+}
+
+// trimUnexportedFields returns the field list trimmed of unexported fields.
+func trimUnexportedFields(fields *ast.FieldList, isInterface bool) *ast.FieldList {
+ what := "methods"
+ if !isInterface {
+ what = "fields"
+ }
+
+ trimmed := false
+ list := make([]*ast.Field, 0, len(fields.List))
+ for _, field := range fields.List {
+ names := field.Names
+ if len(names) == 0 {
+ // Embedded type. Use the name of the type. It must be of the form ident or
+ // pkg.ident (for structs and interfaces), or *ident or *pkg.ident (structs only).
+ // Nothing else is allowed.
+ ty := field.Type
+ if se, ok := field.Type.(*ast.StarExpr); !isInterface && ok {
+ // The form *ident or *pkg.ident is only valid on
+ // embedded types in structs.
+ ty = se.X
+ }
+ switch ident := ty.(type) {
+ case *ast.Ident:
+ if isInterface && ident.Name == "error" && ident.Obj == nil {
+ // For documentation purposes, we consider the builtin error
+ // type special when embedded in an interface, such that it
+ // always gets shown publicly.
+ list = append(list, field)
+ continue
+ }
+ names = []*ast.Ident{ident}
+ case *ast.SelectorExpr:
+ // An embedded type may refer to a type in another package.
+ names = []*ast.Ident{ident.Sel}
+ }
+ if names == nil {
+ // Can only happen if AST is incorrect. Safe to continue with a nil list.
+ log.Print("invalid program: unexpected type for embedded field")
+ }
+ }
+ // Trims if any is unexported. Good enough in practice.
+ ok := true
+ for _, name := range names {
+ if !isExported(name.Name) {
+ trimmed = true
+ ok = false
+ break
+ }
+ }
+ if ok {
+ list = append(list, field)
+ }
+ }
+ if !trimmed {
+ return fields
+ }
+ unexportedField := &ast.Field{
+ Type: &ast.Ident{
+ // Hack: printer will treat this as a field with a named type.
+ // Setting Name and NamePos to ("", fields.Closing-1) ensures that
+ // when Pos and End are called on this field, they return the
+ // position right before closing '}' character.
+ Name: "",
+ NamePos: fields.Closing - 1,
+ },
+ Comment: &ast.CommentGroup{
+ List: []*ast.Comment{{Text: fmt.Sprintf("// Has unexported %s.\n", what)}},
+ },
+ }
+ return &ast.FieldList{
+ Opening: fields.Opening,
+ List: append(list, unexportedField),
+ Closing: fields.Closing,
+ }
+}
+
+// printMethodDoc prints the docs for matches of symbol.method.
+// If symbol is empty, it prints all methods for any concrete type
+// that match the name. It reports whether it found any methods.
+func (pkg *Package) printMethodDoc(symbol, method string) bool {
+ types := pkg.findTypes(symbol)
+ if types == nil {
+ if symbol == "" {
+ return false
+ }
+ pkg.Fatalf("symbol %s is not a type in package %s installed in %q", symbol, pkg.name, pkg.build.ImportPath)
+ }
+ found := false
+ for _, typ := range types {
+ if len(typ.Methods) > 0 {
+ for _, meth := range typ.Methods {
+ if match(method, meth.Name) {
+ decl := meth.Decl
+ pkg.emit(meth.Doc, decl)
+ found = true
+ }
+ }
+ continue
+ }
+ if symbol == "" {
+ continue
+ }
+ // Type may be an interface. The go/doc package does not attach
+ // an interface's methods to the doc.Type. We need to dig around.
+ spec := pkg.findTypeSpec(typ.Decl, typ.Name)
+ inter, ok := spec.Type.(*ast.InterfaceType)
+ if !ok {
+ // Not an interface type.
+ continue
+ }
+ for _, iMethod := range inter.Methods.List {
+ // This is an interface, so there can be only one name.
+ // TODO: Anonymous methods (embedding)
+ if len(iMethod.Names) == 0 {
+ continue
+ }
+ name := iMethod.Names[0].Name
+ if match(method, name) {
+ if iMethod.Doc != nil {
+ for _, comment := range iMethod.Doc.List {
+ doc.ToText(&pkg.buf, comment.Text, "", indent, indentedWidth)
+ }
+ }
+ s := pkg.oneLineNode(iMethod.Type)
+ // Hack: s starts "func" but there is no name present.
+ // We could instead build a FuncDecl but it's not worthwhile.
+ lineComment := ""
+ if iMethod.Comment != nil {
+ lineComment = fmt.Sprintf(" %s", iMethod.Comment.List[0].Text)
+ }
+ pkg.Printf("func %s%s%s\n", name, s[4:], lineComment)
+ found = true
+ }
+ }
+ }
+ return found
+}
+
+// printFieldDoc prints the docs for matches of symbol.fieldName.
+// It reports whether it found any field.
+// Both symbol and fieldName must be non-empty or it returns false.
+func (pkg *Package) printFieldDoc(symbol, fieldName string) bool {
+ if symbol == "" || fieldName == "" {
+ return false
+ }
+ types := pkg.findTypes(symbol)
+ if types == nil {
+ pkg.Fatalf("symbol %s is not a type in package %s installed in %q", symbol, pkg.name, pkg.build.ImportPath)
+ }
+ found := false
+ numUnmatched := 0
+ for _, typ := range types {
+ // Type must be a struct.
+ spec := pkg.findTypeSpec(typ.Decl, typ.Name)
+ structType, ok := spec.Type.(*ast.StructType)
+ if !ok {
+ // Not a struct type.
+ continue
+ }
+ for _, field := range structType.Fields.List {
+ // TODO: Anonymous fields.
+ for _, name := range field.Names {
+ if !match(fieldName, name.Name) {
+ numUnmatched++
+ continue
+ }
+ if !found {
+ pkg.Printf("type %s struct {\n", typ.Name)
+ }
+ if field.Doc != nil {
+ // To present indented blocks in comments correctly, process the comment as
+ // a unit before adding the leading // to each line.
+ docBuf := bytes.Buffer{}
+ doc.ToText(&docBuf, field.Doc.Text(), "", indent, indentedWidth)
+ scanner := bufio.NewScanner(&docBuf)
+ for scanner.Scan() {
+ fmt.Fprintf(&pkg.buf, "%s// %s\n", indent, scanner.Bytes())
+ }
+ }
+ s := pkg.oneLineNode(field.Type)
+ lineComment := ""
+ if field.Comment != nil {
+ lineComment = fmt.Sprintf(" %s", field.Comment.List[0].Text)
+ }
+ pkg.Printf("%s%s %s%s\n", indent, name, s, lineComment)
+ found = true
+ }
+ }
+ }
+ if found {
+ if numUnmatched > 0 {
+ pkg.Printf("\n // ... other fields elided ...\n")
+ }
+ pkg.Printf("}\n")
+ }
+ return found
+}
+
+// methodDoc prints the docs for matches of symbol.method.
+func (pkg *Package) methodDoc(symbol, method string) bool {
+ return pkg.printMethodDoc(symbol, method)
+}
+
+// fieldDoc prints the docs for matches of symbol.field.
+func (pkg *Package) fieldDoc(symbol, field string) bool {
+ return pkg.printFieldDoc(symbol, field)
+}
+
+// match reports whether the user's symbol matches the program's.
+// A lower-case character in the user's string matches either case in the program's.
+// The program string must be exported.
+func match(user, program string) bool {
+ if !isExported(program) {
+ return false
+ }
+ if matchCase {
+ return user == program
+ }
+ for _, u := range user {
+ p, w := utf8.DecodeRuneInString(program)
+ program = program[w:]
+ if u == p {
+ continue
+ }
+ if unicode.IsLower(u) && simpleFold(u) == simpleFold(p) {
+ continue
+ }
+ return false
+ }
+ return program == ""
+}
+
+// simpleFold returns the minimum rune equivalent to r
+// under Unicode-defined simple case folding.
+func simpleFold(r rune) rune {
+ for {
+ r1 := unicode.SimpleFold(r)
+ if r1 <= r {
+ return r1 // wrapped around, found min
+ }
+ r = r1
+ }
+}
diff --git a/src/cmd/doc/testdata/merge/aa.go b/src/cmd/doc/testdata/merge/aa.go
new file mode 100644
index 0000000..f8ab92d
--- /dev/null
+++ b/src/cmd/doc/testdata/merge/aa.go
@@ -0,0 +1,7 @@
+// Package comment A.
+package merge
+
+// A doc.
+func A() {
+ // A comment.
+}
diff --git a/src/cmd/doc/testdata/merge/bb.go b/src/cmd/doc/testdata/merge/bb.go
new file mode 100644
index 0000000..fd8cf3c
--- /dev/null
+++ b/src/cmd/doc/testdata/merge/bb.go
@@ -0,0 +1,7 @@
+// Package comment B.
+package merge
+
+// B doc.
+func B() {
+ // B comment.
+}
diff --git a/src/cmd/doc/testdata/nested/empty/empty.go b/src/cmd/doc/testdata/nested/empty/empty.go
new file mode 100644
index 0000000..609cf0e
--- /dev/null
+++ b/src/cmd/doc/testdata/nested/empty/empty.go
@@ -0,0 +1 @@
+package empty
diff --git a/src/cmd/doc/testdata/nested/ignore.go b/src/cmd/doc/testdata/nested/ignore.go
new file mode 100644
index 0000000..c497f1b
--- /dev/null
+++ b/src/cmd/doc/testdata/nested/ignore.go
@@ -0,0 +1,4 @@
+// +build ignore
+
+// Ignored package
+package nested
diff --git a/src/cmd/doc/testdata/nested/nested/real.go b/src/cmd/doc/testdata/nested/nested/real.go
new file mode 100644
index 0000000..1e55460
--- /dev/null
+++ b/src/cmd/doc/testdata/nested/nested/real.go
@@ -0,0 +1,4 @@
+package nested
+
+type Foo struct {
+}
diff --git a/src/cmd/doc/testdata/pkg.go b/src/cmd/doc/testdata/pkg.go
new file mode 100644
index 0000000..d695bdf
--- /dev/null
+++ b/src/cmd/doc/testdata/pkg.go
@@ -0,0 +1,233 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package comment.
+package pkg
+
+import "io"
+
+// Constants
+
+// Comment about exported constant.
+const ExportedConstant = 1
+
+// Comment about internal constant.
+const internalConstant = 2
+
+// Comment about block of constants.
+const (
+ // Comment before ConstOne.
+ ConstOne = 1
+ ConstTwo = 2 // Comment on line with ConstTwo.
+ constThree = 3 // Comment on line with constThree.
+)
+
+// Const block where first entry is unexported.
+const (
+ constFour = iota
+ ConstFive
+ ConstSix
+)
+
+// Variables
+
+// Comment about exported variable.
+var ExportedVariable = 1
+
+var ExportedVarOfUnExported unexportedType
+
+// Comment about internal variable.
+var internalVariable = 2
+
+// Comment about block of variables.
+var (
+ // Comment before VarOne.
+ VarOne = 1
+ VarTwo = 2 // Comment on line with VarTwo.
+ varThree = 3 // Comment on line with varThree.
+)
+
+// Var block where first entry is unexported.
+var (
+ varFour = 4
+ VarFive = 5
+ varSix = 6
+)
+
+// Comment about exported function.
+func ExportedFunc(a int) bool {
+ return true != false
+}
+
+// Comment about internal function.
+func internalFunc(a int) bool
+
+// Comment about exported type.
+type ExportedType struct {
+ // Comment before exported field.
+ ExportedField int // Comment on line with exported field.
+ unexportedField int // Comment on line with unexported field.
+ ExportedEmbeddedType // Comment on line with exported embedded field.
+ *ExportedEmbeddedType // Comment on line with exported embedded *field.
+ *qualified.ExportedEmbeddedType // Comment on line with exported embedded *selector.field.
+ unexportedType // Comment on line with unexported embedded field.
+ *unexportedType // Comment on line with unexported embedded *field.
+ io.Reader // Comment on line with embedded Reader.
+ error // Comment on line with embedded error.
+}
+
+// Comment about exported method.
+func (ExportedType) ExportedMethod(a int) bool {
+ return true != true
+}
+
+func (ExportedType) Uncommented(a int) bool {
+ return true != true
+}
+
+// Comment about unexported method.
+func (ExportedType) unexportedMethod(a int) bool {
+ return true
+}
+
+type ExportedStructOneField struct {
+ OnlyField int // the only field
+}
+
+// Constants tied to ExportedType. (The type is a struct so this isn't valid Go,
+// but it parses and that's all we need.)
+const (
+ ExportedTypedConstant ExportedType = iota
+)
+
+// Comment about constructor for exported type.
+func ExportedTypeConstructor() *ExportedType {
+ return nil
+}
+
+const unexportedTypedConstant ExportedType = 1 // In a separate section to test -u.
+
+// Comment about exported interface.
+type ExportedInterface interface {
+ // Comment before exported method.
+ ExportedMethod() // Comment on line with exported method.
+ unexportedMethod() // Comment on line with unexported method.
+ io.Reader // Comment on line with embedded Reader.
+ error // Comment on line with embedded error.
+}
+
+// Comment about unexported type.
+type unexportedType int
+
+func (unexportedType) ExportedMethod() bool {
+ return true
+}
+
+func (unexportedType) unexportedMethod() bool {
+ return true
+}
+
+// Constants tied to unexportedType.
+const (
+ ExportedTypedConstant_unexported unexportedType = iota
+)
+
+const unexportedTypedConstant unexportedType = 1 // In a separate section to test -u.
+
+// For case matching.
+const CaseMatch = 1
+const Casematch = 2
+
+func ReturnUnexported() unexportedType { return 0 }
+func ReturnExported() ExportedType { return ExportedType{} }
+
+const MultiLineConst = `
+ MultiLineString1
+ MultiLineString2
+ MultiLineString3
+`
+
+func MultiLineFunc(x interface {
+ MultiLineMethod1() int
+ MultiLineMethod2() int
+ MultiLineMethod3() int
+}) (r struct {
+ MultiLineField1 int
+ MultiLineField2 int
+ MultiLineField3 int
+}) {
+ return r
+}
+
+var MultiLineVar = map[struct {
+ MultiLineField1 string
+ MultiLineField2 uint64
+}]struct {
+ MultiLineField3 error
+ MultiLineField2 error
+}{
+ {"FieldVal1", 1}: {},
+ {"FieldVal2", 2}: {},
+ {"FieldVal3", 3}: {},
+}
+
+const (
+ _, _ uint64 = 2 * iota, 1 << iota
+ constLeft1, constRight1
+ ConstLeft2, constRight2
+ constLeft3, ConstRight3
+ ConstLeft4, ConstRight4
+)
+
+const (
+ ConstGroup1 unexportedType = iota
+ ConstGroup2
+ ConstGroup3
+)
+
+const ConstGroup4 ExportedType = ExportedType{}
+
+func newLongLine(ss ...string)
+
+var LongLine = newLongLine(
+ "someArgument1",
+ "someArgument2",
+ "someArgument3",
+ "someArgument4",
+ "someArgument5",
+ "someArgument6",
+ "someArgument7",
+ "someArgument8",
+)
+
+type T2 int
+
+type T1 = T2
+
+const (
+ Duplicate = iota
+ duplicate
+)
+
+// Comment about exported function with formatting.
+//
+// Example
+//
+// fmt.Println(FormattedDoc())
+//
+// Text after pre-formatted block.
+func ExportedFormattedDoc(a int) bool {
+ return true
+}
+
+type ExportedFormattedType struct {
+ // Comment before exported field with formatting.
+ //
+ // Example
+ //
+ // a.ExportedField = 123
+ //
+ // Text after pre-formatted block.
+ ExportedField int
+}