Diffstat (limited to 'src/go/build')
-rw-r--r--  src/go/build/build.go | 1986
-rw-r--r--  src/go/build/build_test.go | 712
-rw-r--r--  src/go/build/constraint/expr.go | 574
-rw-r--r--  src/go/build/constraint/expr_test.go | 317
-rw-r--r--  src/go/build/deps_test.go | 877
-rw-r--r--  src/go/build/doc.go | 98
-rw-r--r--  src/go/build/gc.go | 17
-rw-r--r--  src/go/build/gccgo.go | 14
-rw-r--r--  src/go/build/read.go | 546
-rw-r--r--  src/go/build/read_test.go | 321
-rw-r--r--  src/go/build/syslist.go | 11
-rw-r--r--  src/go/build/syslist_test.go | 62
-rw-r--r--  src/go/build/testdata/cgo_disabled/cgo_disabled.go | 5
-rw-r--r--  src/go/build/testdata/cgo_disabled/empty.go | 1
-rw-r--r--  src/go/build/testdata/doc/a_test.go | 2
-rw-r--r--  src/go/build/testdata/doc/b_test.go | 1
-rw-r--r--  src/go/build/testdata/doc/c_test.go | 1
-rw-r--r--  src/go/build/testdata/doc/d_test.go | 2
-rw-r--r--  src/go/build/testdata/doc/e.go | 1
-rw-r--r--  src/go/build/testdata/doc/f.go | 2
-rw-r--r--  src/go/build/testdata/empty/dummy | 0
-rw-r--r--  src/go/build/testdata/multi/file.go | 5
-rw-r--r--  src/go/build/testdata/multi/file_appengine.go | 5
-rw-r--r--  src/go/build/testdata/other/file/file.go | 5
-rw-r--r--  src/go/build/testdata/other/main.go | 11
-rw-r--r--  src/go/build/testdata/withvendor/src/a/b/b.go | 3
-rw-r--r--  src/go/build/testdata/withvendor/src/a/vendor/c/d/d.go | 1
27 files changed, 5580 insertions, 0 deletions
diff --git a/src/go/build/build.go b/src/go/build/build.go
new file mode 100644
index 0000000..217fadf
--- /dev/null
+++ b/src/go/build/build.go
@@ -0,0 +1,1986 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package build
+
+import (
+ "bytes"
+ "errors"
+ "fmt"
+ "go/ast"
+ "go/doc"
+ "go/token"
+ exec "internal/execabs"
+ "internal/goroot"
+ "internal/goversion"
+ "io"
+ "io/fs"
+ "io/ioutil"
+ "os"
+ pathpkg "path"
+ "path/filepath"
+ "runtime"
+ "sort"
+ "strconv"
+ "strings"
+ "unicode"
+ "unicode/utf8"
+)
+
+// A Context specifies the supporting context for a build.
+type Context struct {
+ GOARCH string // target architecture
+ GOOS string // target operating system
+ GOROOT string // Go root
+ GOPATH string // Go path
+
+ // Dir is the caller's working directory, or the empty string to use
+ // the current directory of the running process. In module mode, this is used
+ // to locate the main module.
+ //
+ // If Dir is non-empty, directories passed to Import and ImportDir must
+ // be absolute.
+ Dir string
+
+ CgoEnabled bool // whether cgo files are included
+ UseAllFiles bool // use files regardless of +build lines, file names
+ Compiler string // compiler to assume when computing target paths
+
+ // The build and release tags specify build constraints
+ // that should be considered satisfied when processing +build lines.
+ // Clients creating a new context may customize BuildTags, which
+ // defaults to empty, but it is usually an error to customize ReleaseTags,
+ // which defaults to the list of Go releases the current release is compatible with.
+ // BuildTags is not set for the Default build Context.
+ // In addition to the BuildTags and ReleaseTags, build constraints
+ // consider the values of GOARCH and GOOS as satisfied tags.
+ // The last element in ReleaseTags is assumed to be the current release.
+ BuildTags []string
+ ReleaseTags []string
+
+ // The install suffix specifies a suffix to use in the name of the installation
+ // directory. By default it is empty, but custom builds that need to keep
+ // their outputs separate can set InstallSuffix to do so. For example, when
+ // using the race detector, the go command uses InstallSuffix = "race", so
+ // that on a Linux/386 system, packages are written to a directory named
+ // "linux_386_race" instead of the usual "linux_386".
+ InstallSuffix string
+
+ // By default, Import uses the operating system's file system calls
+ // to read directories and files. To read from other sources,
+ // callers can set the following functions. They all have default
+ // behaviors that use the local file system, so clients need only set
+ // the functions whose behaviors they wish to change.
+
+ // JoinPath joins the sequence of path fragments into a single path.
+ // If JoinPath is nil, Import uses filepath.Join.
+ JoinPath func(elem ...string) string
+
+ // SplitPathList splits the path list into a slice of individual paths.
+ // If SplitPathList is nil, Import uses filepath.SplitList.
+ SplitPathList func(list string) []string
+
+ // IsAbsPath reports whether path is an absolute path.
+ // If IsAbsPath is nil, Import uses filepath.IsAbs.
+ IsAbsPath func(path string) bool
+
+ // IsDir reports whether the path names a directory.
+ // If IsDir is nil, Import calls os.Stat and uses the result's IsDir method.
+ IsDir func(path string) bool
+
+ // HasSubdir reports whether dir is lexically a subdirectory of
+ // root, perhaps multiple levels below. It does not try to check
+ // whether dir exists.
+ // If so, HasSubdir sets rel to a slash-separated path that
+ // can be joined to root to produce a path equivalent to dir.
+ // If HasSubdir is nil, Import uses an implementation built on
+ // filepath.EvalSymlinks.
+ HasSubdir func(root, dir string) (rel string, ok bool)
+
+ // ReadDir returns a slice of fs.FileInfo, sorted by Name,
+ // describing the content of the named directory.
+ // If ReadDir is nil, Import uses ioutil.ReadDir.
+ ReadDir func(dir string) ([]fs.FileInfo, error)
+
+ // OpenFile opens a file (not a directory) for reading.
+ // If OpenFile is nil, Import uses os.Open.
+ OpenFile func(path string) (io.ReadCloser, error)
+}
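
A minimal sketch of the customization pattern these fields and hooks describe: copy Default, adjust a couple of fields, and replace a single hook while the nil ones keep their file-system defaults. The "example" build tag and the logging OpenFile wrapper are purely illustrative.

package main

import (
	"fmt"
	"go/build"
	"io"
	"os"
)

func main() {
	// Copy the default context; hooks left nil keep the defaults described above.
	ctxt := build.Default
	ctxt.CgoEnabled = false
	ctxt.BuildTags = []string{"example"} // hypothetical extra tag

	// Replace OpenFile only to observe which files get read.
	ctxt.OpenFile = func(path string) (io.ReadCloser, error) {
		fmt.Println("reading", path)
		return os.Open(path)
	}

	p, err := ctxt.ImportDir(".", 0)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(p.Name, p.GoFiles)
}
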
+
+// joinPath calls ctxt.JoinPath (if not nil) or else filepath.Join.
+func (ctxt *Context) joinPath(elem ...string) string {
+ if f := ctxt.JoinPath; f != nil {
+ return f(elem...)
+ }
+ return filepath.Join(elem...)
+}
+
+// splitPathList calls ctxt.SplitPathList (if not nil) or else filepath.SplitList.
+func (ctxt *Context) splitPathList(s string) []string {
+ if f := ctxt.SplitPathList; f != nil {
+ return f(s)
+ }
+ return filepath.SplitList(s)
+}
+
+// isAbsPath calls ctxt.IsAbsPath (if not nil) or else filepath.IsAbs.
+func (ctxt *Context) isAbsPath(path string) bool {
+ if f := ctxt.IsAbsPath; f != nil {
+ return f(path)
+ }
+ return filepath.IsAbs(path)
+}
+
+// isDir calls ctxt.IsDir (if not nil) or else uses os.Stat.
+func (ctxt *Context) isDir(path string) bool {
+ if f := ctxt.IsDir; f != nil {
+ return f(path)
+ }
+ fi, err := os.Stat(path)
+ return err == nil && fi.IsDir()
+}
+
+// hasSubdir calls ctxt.HasSubdir (if not nil) or else uses
+// the local file system to answer the question.
+func (ctxt *Context) hasSubdir(root, dir string) (rel string, ok bool) {
+ if f := ctxt.HasSubdir; f != nil {
+ return f(root, dir)
+ }
+
+ // Try using paths we received.
+ if rel, ok = hasSubdir(root, dir); ok {
+ return
+ }
+
+ // Try expanding symlinks and comparing
+ // expanded against unexpanded and
+ // expanded against expanded.
+ rootSym, _ := filepath.EvalSymlinks(root)
+ dirSym, _ := filepath.EvalSymlinks(dir)
+
+ if rel, ok = hasSubdir(rootSym, dir); ok {
+ return
+ }
+ if rel, ok = hasSubdir(root, dirSym); ok {
+ return
+ }
+ return hasSubdir(rootSym, dirSym)
+}
+
+// hasSubdir reports if dir is within root by performing lexical analysis only.
+func hasSubdir(root, dir string) (rel string, ok bool) {
+ const sep = string(filepath.Separator)
+ root = filepath.Clean(root)
+ if !strings.HasSuffix(root, sep) {
+ root += sep
+ }
+ dir = filepath.Clean(dir)
+ if !strings.HasPrefix(dir, root) {
+ return "", false
+ }
+ return filepath.ToSlash(dir[len(root):]), true
+}
+
+// readDir calls ctxt.ReadDir (if not nil) or else ioutil.ReadDir.
+func (ctxt *Context) readDir(path string) ([]fs.FileInfo, error) {
+ if f := ctxt.ReadDir; f != nil {
+ return f(path)
+ }
+ return ioutil.ReadDir(path)
+}
+
+// openFile calls ctxt.OpenFile (if not nil) or else os.Open.
+func (ctxt *Context) openFile(path string) (io.ReadCloser, error) {
+ if fn := ctxt.OpenFile; fn != nil {
+ return fn(path)
+ }
+
+ f, err := os.Open(path)
+ if err != nil {
+ return nil, err // nil interface
+ }
+ return f, nil
+}
+
+// isFile determines whether path is a file by trying to open it.
+// It reuses openFile instead of adding another function to the
+// list in Context.
+func (ctxt *Context) isFile(path string) bool {
+ f, err := ctxt.openFile(path)
+ if err != nil {
+ return false
+ }
+ f.Close()
+ return true
+}
+
+// gopath returns the list of Go path directories.
+func (ctxt *Context) gopath() []string {
+ var all []string
+ for _, p := range ctxt.splitPathList(ctxt.GOPATH) {
+ if p == "" || p == ctxt.GOROOT {
+ // Empty paths are uninteresting.
+ // If the path is the GOROOT, ignore it.
+ // People sometimes set GOPATH=$GOROOT.
+ // Do not get confused by this common mistake.
+ continue
+ }
+ if strings.HasPrefix(p, "~") {
+ // Path segments starting with ~ on Unix are almost always
+ // users who have incorrectly quoted ~ while setting GOPATH,
+ // preventing it from expanding to $HOME.
+ // The situation is made more confusing by the fact that
+ // bash allows quoted ~ in $PATH (most shells do not).
+ // Do not get confused by this, and do not try to use the path.
+ // It does not exist, and printing errors about it confuses
+ // those users even more, because they think "sure ~ exists!".
+ // The go command diagnoses this situation and prints a
+ // useful error.
+ // On Windows, ~ is used in short names, such as c:\progra~1
+ // for c:\program files.
+ continue
+ }
+ all = append(all, p)
+ }
+ return all
+}
+
+// SrcDirs returns a list of package source root directories.
+// It draws from the current Go root and Go path but omits directories
+// that do not exist.
+func (ctxt *Context) SrcDirs() []string {
+ var all []string
+ if ctxt.GOROOT != "" && ctxt.Compiler != "gccgo" {
+ dir := ctxt.joinPath(ctxt.GOROOT, "src")
+ if ctxt.isDir(dir) {
+ all = append(all, dir)
+ }
+ }
+ for _, p := range ctxt.gopath() {
+ dir := ctxt.joinPath(p, "src")
+ if ctxt.isDir(dir) {
+ all = append(all, dir)
+ }
+ }
+ return all
+}
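
For instance, a short program can list the source roots the default context would search (typically $GOROOT/src plus the src directory of each GOPATH entry that exists):

package main

import (
	"fmt"
	"go/build"
)

func main() {
	for _, dir := range build.Default.SrcDirs() {
		fmt.Println(dir)
	}
}
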
+
+// Default is the default Context for builds.
+// It uses the GOARCH, GOOS, GOROOT, and GOPATH environment variables
+// if set, or else the compiled code's GOARCH, GOOS, and GOROOT.
+var Default Context = defaultContext()
+
+func defaultGOPATH() string {
+ env := "HOME"
+ if runtime.GOOS == "windows" {
+ env = "USERPROFILE"
+ } else if runtime.GOOS == "plan9" {
+ env = "home"
+ }
+ if home := os.Getenv(env); home != "" {
+ def := filepath.Join(home, "go")
+ if filepath.Clean(def) == filepath.Clean(runtime.GOROOT()) {
+ // Don't set the default GOPATH to GOROOT,
+ // as that will trigger warnings from the go tool.
+ return ""
+ }
+ return def
+ }
+ return ""
+}
+
+var defaultReleaseTags []string
+
+func defaultContext() Context {
+ var c Context
+
+ c.GOARCH = envOr("GOARCH", runtime.GOARCH)
+ c.GOOS = envOr("GOOS", runtime.GOOS)
+ c.GOROOT = pathpkg.Clean(runtime.GOROOT())
+ c.GOPATH = envOr("GOPATH", defaultGOPATH())
+ c.Compiler = runtime.Compiler
+
+ // Each major Go release in the Go 1.x series adds a new
+ // "go1.x" release tag. That is, the go1.x tag is present in
+ // all releases >= Go 1.x. Code that requires Go 1.x or later
+ // should say "+build go1.x", and code that should only be
+ // built before Go 1.x (perhaps it is the stub to use in that
+ // case) should say "+build !go1.x".
+ // The last element in ReleaseTags is the current release.
+ for i := 1; i <= goversion.Version; i++ {
+ c.ReleaseTags = append(c.ReleaseTags, "go1."+strconv.Itoa(i))
+ }
+
+ defaultReleaseTags = append([]string{}, c.ReleaseTags...) // our own private copy
+
+ env := os.Getenv("CGO_ENABLED")
+ if env == "" {
+ env = defaultCGO_ENABLED
+ }
+ switch env {
+ case "1":
+ c.CgoEnabled = true
+ case "0":
+ c.CgoEnabled = false
+ default:
+ // cgo must be explicitly enabled for cross compilation builds
+ if runtime.GOARCH == c.GOARCH && runtime.GOOS == c.GOOS {
+ c.CgoEnabled = cgoEnabled[c.GOOS+"/"+c.GOARCH]
+ break
+ }
+ c.CgoEnabled = false
+ }
+
+ return c
+}
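
A quick way to see the release tags described above; per the comment, the last element names the current release (the output depends on the toolchain in use):

package main

import (
	"fmt"
	"go/build"
)

func main() {
	tags := build.Default.ReleaseTags
	fmt.Println("current release tag:", tags[len(tags)-1])
	fmt.Println("all release tags:", tags)
}
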
+
+func envOr(name, def string) string {
+ s := os.Getenv(name)
+ if s == "" {
+ return def
+ }
+ return s
+}
+
+// An ImportMode controls the behavior of the Import method.
+type ImportMode uint
+
+const (
+ // If FindOnly is set, Import stops after locating the directory
+ // that should contain the sources for a package. It does not
+ // read any files in the directory.
+ FindOnly ImportMode = 1 << iota
+
+ // If AllowBinary is set, Import can be satisfied by a compiled
+ // package object without corresponding sources.
+ //
+ // Deprecated:
+ // The supported way to create a compiled-only package is to
+ // write source code containing a //go:binary-only-package comment at
+ // the top of the file. Such a package will be recognized
+ // regardless of this flag setting (because it has source code)
+ // and will have BinaryOnly set to true in the returned Package.
+ AllowBinary
+
+ // If ImportComment is set, parse import comments on package statements.
+ // Import returns an error if it finds a comment it cannot understand
+ // or finds conflicting comments in multiple source files.
+ // See golang.org/s/go14customimport for more information.
+ ImportComment
+
+ // By default, Import searches vendor directories
+ // that apply in the given source directory before searching
+ // the GOROOT and GOPATH roots.
+ // If an Import finds and returns a package using a vendor
+ // directory, the resulting ImportPath is the complete path
+ // to the package, including the path elements leading up
+ // to and including "vendor".
+ // For example, if Import("y", "x/subdir", 0) finds
+ // "x/vendor/y", the returned package's ImportPath is "x/vendor/y",
+ // not plain "y".
+ // See golang.org/s/go15vendor for more information.
+ //
+ // Setting IgnoreVendor ignores vendor directories.
+ //
+ // In contrast to the package's ImportPath,
+ // the returned package's Imports, TestImports, and XTestImports
+ // are always the exact import paths from the source files:
+ // Import makes no attempt to resolve or check those paths.
+ IgnoreVendor
+)
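
A minimal sketch of FindOnly in use: the package directory is located, but no files in it are read, so the file lists in the result stay empty. The "encoding/json" path is just a convenient standard-library example.

package main

import (
	"fmt"
	"go/build"
)

func main() {
	p, err := build.Import("encoding/json", "", build.FindOnly)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(p.Dir)          // location only
	fmt.Println(len(p.GoFiles)) // 0: FindOnly stops before reading the directory
}
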
+
+// A Package describes the Go package found in a directory.
+type Package struct {
+ Dir string // directory containing package sources
+ Name string // package name
+ ImportComment string // path in import comment on package statement
+ Doc string // documentation synopsis
+ ImportPath string // import path of package ("" if unknown)
+ Root string // root of Go tree where this package lives
+ SrcRoot string // package source root directory ("" if unknown)
+ PkgRoot string // package install root directory ("" if unknown)
+ PkgTargetRoot string // architecture dependent install root directory ("" if unknown)
+ BinDir string // command install directory ("" if unknown)
+ Goroot bool // package found in Go root
+ PkgObj string // installed .a file
+ AllTags []string // tags that can influence file selection in this directory
+ ConflictDir string // this directory shadows Dir in $GOPATH
+ BinaryOnly bool // cannot be rebuilt from source (has //go:binary-only-package comment)
+
+ // Source files
+ GoFiles []string // .go source files (excluding CgoFiles, TestGoFiles, XTestGoFiles)
+ CgoFiles []string // .go source files that import "C"
+ IgnoredGoFiles []string // .go source files ignored for this build (including ignored _test.go files)
+ InvalidGoFiles []string // .go source files with detected problems (parse error, wrong package name, and so on)
+ IgnoredOtherFiles []string // non-.go source files ignored for this build
+ CFiles []string // .c source files
+ CXXFiles []string // .cc, .cpp and .cxx source files
+ MFiles []string // .m (Objective-C) source files
+ HFiles []string // .h, .hh, .hpp and .hxx source files
+ FFiles []string // .f, .F, .for and .f90 Fortran source files
+ SFiles []string // .s source files
+ SwigFiles []string // .swig files
+ SwigCXXFiles []string // .swigcxx files
+ SysoFiles []string // .syso system object files to add to archive
+
+ // Cgo directives
+ CgoCFLAGS []string // Cgo CFLAGS directives
+ CgoCPPFLAGS []string // Cgo CPPFLAGS directives
+ CgoCXXFLAGS []string // Cgo CXXFLAGS directives
+ CgoFFLAGS []string // Cgo FFLAGS directives
+ CgoLDFLAGS []string // Cgo LDFLAGS directives
+ CgoPkgConfig []string // Cgo pkg-config directives
+
+ // Test information
+ TestGoFiles []string // _test.go files in package
+ XTestGoFiles []string // _test.go files outside package
+
+ // Dependency information
+ Imports []string // import paths from GoFiles, CgoFiles
+ ImportPos map[string][]token.Position // line information for Imports
+ TestImports []string // import paths from TestGoFiles
+ TestImportPos map[string][]token.Position // line information for TestImports
+ XTestImports []string // import paths from XTestGoFiles
+ XTestImportPos map[string][]token.Position // line information for XTestImports
+
+ // //go:embed patterns found in Go source files
+ // For example, if a source file says
+ // //go:embed a* b.c
+ // then the list will contain those two strings as separate entries.
+ // (See package embed for more details about //go:embed.)
+ EmbedPatterns []string // patterns from GoFiles, CgoFiles
+ EmbedPatternPos map[string][]token.Position // line information for EmbedPatterns
+ TestEmbedPatterns []string // patterns from TestGoFiles
+ TestEmbedPatternPos map[string][]token.Position // line information for TestEmbedPatterns
+ XTestEmbedPatterns []string // patterns from XTestGoFiles
+ XTestEmbedPatternPos map[string][]token.Position // line information for XTestEmbedPatterns
+}
+
+// IsCommand reports whether the package is considered a
+// command to be installed (not just a library).
+// Packages named "main" are treated as commands.
+func (p *Package) IsCommand() bool {
+ return p.Name == "main"
+}
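
A small example of inspecting the fields above for the package in the current directory (whatever directory the program happens to run in):

package main

import (
	"fmt"
	"go/build"
)

func main() {
	p, err := build.ImportDir(".", 0)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println("name:", p.Name, "command:", p.IsCommand())
	fmt.Println("go files:", p.GoFiles)
	fmt.Println("imports:", p.Imports)
	fmt.Println("tags that influenced selection:", p.AllTags)
}
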
+
+// ImportDir is like Import but processes the Go package found in
+// the named directory.
+func (ctxt *Context) ImportDir(dir string, mode ImportMode) (*Package, error) {
+ return ctxt.Import(".", dir, mode)
+}
+
+// NoGoError is the error used by Import to describe a directory
+// containing no buildable Go source files. (It may still contain
+// test files, files hidden by build tags, and so on.)
+type NoGoError struct {
+ Dir string
+}
+
+func (e *NoGoError) Error() string {
+ return "no buildable Go source files in " + e.Dir
+}
+
+// MultiplePackageError describes a directory containing
+// multiple buildable Go source files for multiple packages.
+type MultiplePackageError struct {
+ Dir string // directory containing files
+ Packages []string // package names found
+ Files []string // corresponding files: Files[i] declares package Packages[i]
+}
+
+func (e *MultiplePackageError) Error() string {
+ // Error string limited to two entries for compatibility.
+ return fmt.Sprintf("found packages %s (%s) and %s (%s) in %s", e.Packages[0], e.Files[0], e.Packages[1], e.Files[1], e.Dir)
+}
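
Both error types are returned as pointers, so callers can distinguish them with errors.As; a sketch (the directory name here is hypothetical):

package main

import (
	"errors"
	"fmt"
	"go/build"
)

func main() {
	_, err := build.ImportDir("./no_go_files_here", 0) // hypothetical directory
	var noGo *build.NoGoError
	var multi *build.MultiplePackageError
	switch {
	case errors.As(err, &noGo):
		fmt.Println("no buildable Go files in", noGo.Dir)
	case errors.As(err, &multi):
		fmt.Println("conflicting packages:", multi.Packages)
	case err != nil:
		fmt.Println("other error:", err)
	}
}
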
+
+func nameExt(name string) string {
+ i := strings.LastIndex(name, ".")
+ if i < 0 {
+ return ""
+ }
+ return name[i:]
+}
+
+// Import returns details about the Go package named by the import path,
+// interpreting local import paths relative to the srcDir directory.
+// If the path is a local import path naming a package that can be imported
+// using a standard import path, the returned package will set p.ImportPath
+// to that path.
+//
+// In the directory containing the package, .go, .c, .h, and .s files are
+// considered part of the package except for:
+//
+// - .go files in package documentation
+// - files starting with _ or . (likely editor temporary files)
+// - files with build constraints not satisfied by the context
+//
+// If an error occurs, Import returns a non-nil error and a non-nil
+// *Package containing partial information.
+//
+func (ctxt *Context) Import(path string, srcDir string, mode ImportMode) (*Package, error) {
+ p := &Package{
+ ImportPath: path,
+ }
+ if path == "" {
+ return p, fmt.Errorf("import %q: invalid import path", path)
+ }
+
+ var pkgtargetroot string
+ var pkga string
+ var pkgerr error
+ suffix := ""
+ if ctxt.InstallSuffix != "" {
+ suffix = "_" + ctxt.InstallSuffix
+ }
+ switch ctxt.Compiler {
+ case "gccgo":
+ pkgtargetroot = "pkg/gccgo_" + ctxt.GOOS + "_" + ctxt.GOARCH + suffix
+ case "gc":
+ pkgtargetroot = "pkg/" + ctxt.GOOS + "_" + ctxt.GOARCH + suffix
+ default:
+ // Save error for end of function.
+ pkgerr = fmt.Errorf("import %q: unknown compiler %q", path, ctxt.Compiler)
+ }
+ setPkga := func() {
+ switch ctxt.Compiler {
+ case "gccgo":
+ dir, elem := pathpkg.Split(p.ImportPath)
+ pkga = pkgtargetroot + "/" + dir + "lib" + elem + ".a"
+ case "gc":
+ pkga = pkgtargetroot + "/" + p.ImportPath + ".a"
+ }
+ }
+ setPkga()
+
+ binaryOnly := false
+ if IsLocalImport(path) {
+ pkga = "" // local imports have no installed path
+ if srcDir == "" {
+ return p, fmt.Errorf("import %q: import relative to unknown directory", path)
+ }
+ if !ctxt.isAbsPath(path) {
+ p.Dir = ctxt.joinPath(srcDir, path)
+ }
+ // p.Dir directory may or may not exist. Gather partial information first, check if it exists later.
+ // Determine canonical import path, if any.
+ // Exclude results where the import path would include /testdata/.
+ inTestdata := func(sub string) bool {
+ return strings.Contains(sub, "/testdata/") || strings.HasSuffix(sub, "/testdata") || strings.HasPrefix(sub, "testdata/") || sub == "testdata"
+ }
+ if ctxt.GOROOT != "" {
+ root := ctxt.joinPath(ctxt.GOROOT, "src")
+ if sub, ok := ctxt.hasSubdir(root, p.Dir); ok && !inTestdata(sub) {
+ p.Goroot = true
+ p.ImportPath = sub
+ p.Root = ctxt.GOROOT
+ setPkga() // p.ImportPath changed
+ goto Found
+ }
+ }
+ all := ctxt.gopath()
+ for i, root := range all {
+ rootsrc := ctxt.joinPath(root, "src")
+ if sub, ok := ctxt.hasSubdir(rootsrc, p.Dir); ok && !inTestdata(sub) {
+ // We found a potential import path for dir,
+ // but check that using it wouldn't find something
+ // else first.
+ if ctxt.GOROOT != "" && ctxt.Compiler != "gccgo" {
+ if dir := ctxt.joinPath(ctxt.GOROOT, "src", sub); ctxt.isDir(dir) {
+ p.ConflictDir = dir
+ goto Found
+ }
+ }
+ for _, earlyRoot := range all[:i] {
+ if dir := ctxt.joinPath(earlyRoot, "src", sub); ctxt.isDir(dir) {
+ p.ConflictDir = dir
+ goto Found
+ }
+ }
+
+ // sub would not name some other directory instead of this one.
+ // Record it.
+ p.ImportPath = sub
+ p.Root = root
+ setPkga() // p.ImportPath changed
+ goto Found
+ }
+ }
+ // It's okay that we didn't find a root containing dir.
+ // Keep going with the information we have.
+ } else {
+ if strings.HasPrefix(path, "/") {
+ return p, fmt.Errorf("import %q: cannot import absolute path", path)
+ }
+
+ if err := ctxt.importGo(p, path, srcDir, mode); err == nil {
+ goto Found
+ } else if err != errNoModules {
+ return p, err
+ }
+
+ gopath := ctxt.gopath() // needed twice below; avoid computing many times
+
+ // tried records the location of unsuccessful package lookups
+ var tried struct {
+ vendor []string
+ goroot string
+ gopath []string
+ }
+
+ // Vendor directories get first chance to satisfy import.
+ if mode&IgnoreVendor == 0 && srcDir != "" {
+ searchVendor := func(root string, isGoroot bool) bool {
+ sub, ok := ctxt.hasSubdir(root, srcDir)
+ if !ok || !strings.HasPrefix(sub, "src/") || strings.Contains(sub, "/testdata/") {
+ return false
+ }
+ for {
+ vendor := ctxt.joinPath(root, sub, "vendor")
+ if ctxt.isDir(vendor) {
+ dir := ctxt.joinPath(vendor, path)
+ if ctxt.isDir(dir) && hasGoFiles(ctxt, dir) {
+ p.Dir = dir
+ p.ImportPath = strings.TrimPrefix(pathpkg.Join(sub, "vendor", path), "src/")
+ p.Goroot = isGoroot
+ p.Root = root
+ setPkga() // p.ImportPath changed
+ return true
+ }
+ tried.vendor = append(tried.vendor, dir)
+ }
+ i := strings.LastIndex(sub, "/")
+ if i < 0 {
+ break
+ }
+ sub = sub[:i]
+ }
+ return false
+ }
+ if ctxt.Compiler != "gccgo" && searchVendor(ctxt.GOROOT, true) {
+ goto Found
+ }
+ for _, root := range gopath {
+ if searchVendor(root, false) {
+ goto Found
+ }
+ }
+ }
+
+ // Determine directory from import path.
+ if ctxt.GOROOT != "" {
+ // If the package path starts with "vendor/", only search GOROOT before
+ // GOPATH if the importer is also within GOROOT. That way, if the user has
+ // vendored in a package that is subsequently included in the standard
+ // distribution, they'll continue to pick up their own vendored copy.
+ gorootFirst := srcDir == "" || !strings.HasPrefix(path, "vendor/")
+ if !gorootFirst {
+ _, gorootFirst = ctxt.hasSubdir(ctxt.GOROOT, srcDir)
+ }
+ if gorootFirst {
+ dir := ctxt.joinPath(ctxt.GOROOT, "src", path)
+ if ctxt.Compiler != "gccgo" {
+ isDir := ctxt.isDir(dir)
+ binaryOnly = !isDir && mode&AllowBinary != 0 && pkga != "" && ctxt.isFile(ctxt.joinPath(ctxt.GOROOT, pkga))
+ if isDir || binaryOnly {
+ p.Dir = dir
+ p.Goroot = true
+ p.Root = ctxt.GOROOT
+ goto Found
+ }
+ }
+ tried.goroot = dir
+ }
+ }
+ if ctxt.Compiler == "gccgo" && goroot.IsStandardPackage(ctxt.GOROOT, ctxt.Compiler, path) {
+ p.Dir = ctxt.joinPath(ctxt.GOROOT, "src", path)
+ p.Goroot = true
+ p.Root = ctxt.GOROOT
+ goto Found
+ }
+ for _, root := range gopath {
+ dir := ctxt.joinPath(root, "src", path)
+ isDir := ctxt.isDir(dir)
+ binaryOnly = !isDir && mode&AllowBinary != 0 && pkga != "" && ctxt.isFile(ctxt.joinPath(root, pkga))
+ if isDir || binaryOnly {
+ p.Dir = dir
+ p.Root = root
+ goto Found
+ }
+ tried.gopath = append(tried.gopath, dir)
+ }
+
+ // If we tried GOPATH first due to a "vendor/" prefix, fall back to GOROOT.
+ // That way, the user can still get useful results from 'go list' for
+ // standard-vendored paths passed on the command line.
+ if ctxt.GOROOT != "" && tried.goroot == "" {
+ dir := ctxt.joinPath(ctxt.GOROOT, "src", path)
+ if ctxt.Compiler != "gccgo" {
+ isDir := ctxt.isDir(dir)
+ binaryOnly = !isDir && mode&AllowBinary != 0 && pkga != "" && ctxt.isFile(ctxt.joinPath(ctxt.GOROOT, pkga))
+ if isDir || binaryOnly {
+ p.Dir = dir
+ p.Goroot = true
+ p.Root = ctxt.GOROOT
+ goto Found
+ }
+ }
+ tried.goroot = dir
+ }
+
+ // package was not found
+ var paths []string
+ format := "\t%s (vendor tree)"
+ for _, dir := range tried.vendor {
+ paths = append(paths, fmt.Sprintf(format, dir))
+ format = "\t%s"
+ }
+ if tried.goroot != "" {
+ paths = append(paths, fmt.Sprintf("\t%s (from $GOROOT)", tried.goroot))
+ } else {
+ paths = append(paths, "\t($GOROOT not set)")
+ }
+ format = "\t%s (from $GOPATH)"
+ for _, dir := range tried.gopath {
+ paths = append(paths, fmt.Sprintf(format, dir))
+ format = "\t%s"
+ }
+ if len(tried.gopath) == 0 {
+ paths = append(paths, "\t($GOPATH not set. For more details see: 'go help gopath')")
+ }
+ return p, fmt.Errorf("cannot find package %q in any of:\n%s", path, strings.Join(paths, "\n"))
+ }
+
+Found:
+ if p.Root != "" {
+ p.SrcRoot = ctxt.joinPath(p.Root, "src")
+ p.PkgRoot = ctxt.joinPath(p.Root, "pkg")
+ p.BinDir = ctxt.joinPath(p.Root, "bin")
+ if pkga != "" {
+ p.PkgTargetRoot = ctxt.joinPath(p.Root, pkgtargetroot)
+ p.PkgObj = ctxt.joinPath(p.Root, pkga)
+ }
+ }
+
+ // If it's a local import path, by the time we get here, we still haven't checked
+ // that p.Dir directory exists. This is the right time to do that check.
+ // We can't do it earlier, because we want to gather partial information for the
+ // non-nil *Package returned when an error occurs.
+ // We need to do this before we return early on FindOnly flag.
+ if IsLocalImport(path) && !ctxt.isDir(p.Dir) {
+ if ctxt.Compiler == "gccgo" && p.Goroot {
+ // gccgo has no sources for GOROOT packages.
+ return p, nil
+ }
+
+ // package was not found
+ return p, fmt.Errorf("cannot find package %q in:\n\t%s", path, p.Dir)
+ }
+
+ if mode&FindOnly != 0 {
+ return p, pkgerr
+ }
+ if binaryOnly && (mode&AllowBinary) != 0 {
+ return p, pkgerr
+ }
+
+ if ctxt.Compiler == "gccgo" && p.Goroot {
+ // gccgo has no sources for GOROOT packages.
+ return p, nil
+ }
+
+ dirs, err := ctxt.readDir(p.Dir)
+ if err != nil {
+ return p, err
+ }
+
+ var badGoError error
+ var Sfiles []string // files with ".S" (capital S) or ".sx" (the equivalent for case-insensitive file systems)
+ var firstFile, firstCommentFile string
+ embedPos := make(map[string][]token.Position)
+ testEmbedPos := make(map[string][]token.Position)
+ xTestEmbedPos := make(map[string][]token.Position)
+ importPos := make(map[string][]token.Position)
+ testImportPos := make(map[string][]token.Position)
+ xTestImportPos := make(map[string][]token.Position)
+ allTags := make(map[string]bool)
+ fset := token.NewFileSet()
+ for _, d := range dirs {
+ if d.IsDir() {
+ continue
+ }
+ if d.Mode()&fs.ModeSymlink != 0 {
+ if ctxt.isDir(ctxt.joinPath(p.Dir, d.Name())) {
+ // Symlinks to directories are not source files.
+ continue
+ }
+ }
+
+ name := d.Name()
+ ext := nameExt(name)
+
+ badFile := func(err error) {
+ if badGoError == nil {
+ badGoError = err
+ }
+ p.InvalidGoFiles = append(p.InvalidGoFiles, name)
+ }
+
+ info, err := ctxt.matchFile(p.Dir, name, allTags, &p.BinaryOnly, fset)
+ if err != nil {
+ badFile(err)
+ continue
+ }
+ if info == nil {
+ if strings.HasPrefix(name, "_") || strings.HasPrefix(name, ".") {
+ // not due to build constraints - don't report
+ } else if ext == ".go" {
+ p.IgnoredGoFiles = append(p.IgnoredGoFiles, name)
+ } else if fileListForExt(p, ext) != nil {
+ p.IgnoredOtherFiles = append(p.IgnoredOtherFiles, name)
+ }
+ continue
+ }
+ data, filename := info.header, info.name
+
+ // Going to save the file. For non-Go files, can stop here.
+ switch ext {
+ case ".go":
+ // keep going
+ case ".S", ".sx":
+ // special case for cgo, handled at end
+ Sfiles = append(Sfiles, name)
+ continue
+ default:
+ if list := fileListForExt(p, ext); list != nil {
+ *list = append(*list, name)
+ }
+ continue
+ }
+
+ if info.parseErr != nil {
+ badFile(info.parseErr)
+ continue
+ }
+ pf := info.parsed
+
+ pkg := pf.Name.Name
+ if pkg == "documentation" {
+ p.IgnoredGoFiles = append(p.IgnoredGoFiles, name)
+ continue
+ }
+
+ isTest := strings.HasSuffix(name, "_test.go")
+ isXTest := false
+ if isTest && strings.HasSuffix(pkg, "_test") {
+ isXTest = true
+ pkg = pkg[:len(pkg)-len("_test")]
+ }
+
+ if p.Name == "" {
+ p.Name = pkg
+ firstFile = name
+ } else if pkg != p.Name {
+ badFile(&MultiplePackageError{
+ Dir: p.Dir,
+ Packages: []string{p.Name, pkg},
+ Files: []string{firstFile, name},
+ })
+ p.InvalidGoFiles = append(p.InvalidGoFiles, name)
+ }
+ // Grab the first package comment as docs, provided it is not from a test file.
+ if pf.Doc != nil && p.Doc == "" && !isTest && !isXTest {
+ p.Doc = doc.Synopsis(pf.Doc.Text())
+ }
+
+ if mode&ImportComment != 0 {
+ qcom, line := findImportComment(data)
+ if line != 0 {
+ com, err := strconv.Unquote(qcom)
+ if err != nil {
+ badFile(fmt.Errorf("%s:%d: cannot parse import comment", filename, line))
+ } else if p.ImportComment == "" {
+ p.ImportComment = com
+ firstCommentFile = name
+ } else if p.ImportComment != com {
+ badFile(fmt.Errorf("found import comments %q (%s) and %q (%s) in %s", p.ImportComment, firstCommentFile, com, name, p.Dir))
+ }
+ }
+ }
+
+ // Record imports and information about cgo.
+ isCgo := false
+ for _, imp := range info.imports {
+ if imp.path == "C" {
+ if isTest {
+ badFile(fmt.Errorf("use of cgo in test %s not supported", filename))
+ continue
+ }
+ isCgo = true
+ if imp.doc != nil {
+ if err := ctxt.saveCgo(filename, p, imp.doc); err != nil {
+ badFile(err)
+ }
+ }
+ }
+ }
+
+ var fileList *[]string
+ var importMap, embedMap map[string][]token.Position
+ switch {
+ case isCgo:
+ allTags["cgo"] = true
+ if ctxt.CgoEnabled {
+ fileList = &p.CgoFiles
+ importMap = importPos
+ embedMap = embedPos
+ } else {
+ // Ignore imports and embeds from cgo files if cgo is disabled.
+ fileList = &p.IgnoredGoFiles
+ }
+ case isXTest:
+ fileList = &p.XTestGoFiles
+ importMap = xTestImportPos
+ embedMap = xTestEmbedPos
+ case isTest:
+ fileList = &p.TestGoFiles
+ importMap = testImportPos
+ embedMap = testEmbedPos
+ default:
+ fileList = &p.GoFiles
+ importMap = importPos
+ embedMap = embedPos
+ }
+ *fileList = append(*fileList, name)
+ if importMap != nil {
+ for _, imp := range info.imports {
+ importMap[imp.path] = append(importMap[imp.path], fset.Position(imp.pos))
+ }
+ }
+ if embedMap != nil {
+ for _, emb := range info.embeds {
+ embedMap[emb.pattern] = append(embedMap[emb.pattern], emb.pos)
+ }
+ }
+ }
+
+ for tag := range allTags {
+ p.AllTags = append(p.AllTags, tag)
+ }
+ sort.Strings(p.AllTags)
+
+ p.EmbedPatterns, p.EmbedPatternPos = cleanDecls(embedPos)
+ p.TestEmbedPatterns, p.TestEmbedPatternPos = cleanDecls(testEmbedPos)
+ p.XTestEmbedPatterns, p.XTestEmbedPatternPos = cleanDecls(xTestEmbedPos)
+
+ p.Imports, p.ImportPos = cleanDecls(importPos)
+ p.TestImports, p.TestImportPos = cleanDecls(testImportPos)
+ p.XTestImports, p.XTestImportPos = cleanDecls(xTestImportPos)
+
+ // add the .S/.sx files only if we are using cgo
+ // (which means gcc will compile them).
+ // The standard assemblers expect .s files.
+ if len(p.CgoFiles) > 0 {
+ p.SFiles = append(p.SFiles, Sfiles...)
+ sort.Strings(p.SFiles)
+ } else {
+ p.IgnoredOtherFiles = append(p.IgnoredOtherFiles, Sfiles...)
+ sort.Strings(p.IgnoredOtherFiles)
+ }
+
+ if badGoError != nil {
+ return p, badGoError
+ }
+ if len(p.GoFiles)+len(p.CgoFiles)+len(p.TestGoFiles)+len(p.XTestGoFiles) == 0 {
+ return p, &NoGoError{p.Dir}
+ }
+ return p, pkgerr
+}
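
As a usage sketch of the local-import case handled above: a relative path is resolved against srcDir, and ImportPath is upgraded to a full path only if the directory sits under a known source root. The "./internal/util" path is a placeholder.

package main

import (
	"fmt"
	"go/build"
	"os"
)

func main() {
	wd, err := os.Getwd()
	if err != nil {
		fmt.Println(err)
		return
	}
	p, err := build.Import("./internal/util", wd, build.FindOnly) // placeholder relative path
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(p.Dir, p.ImportPath)
}
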
+
+func fileListForExt(p *Package, ext string) *[]string {
+ switch ext {
+ case ".c":
+ return &p.CFiles
+ case ".cc", ".cpp", ".cxx":
+ return &p.CXXFiles
+ case ".m":
+ return &p.MFiles
+ case ".h", ".hh", ".hpp", ".hxx":
+ return &p.HFiles
+ case ".f", ".F", ".for", ".f90":
+ return &p.FFiles
+ case ".s", ".S", ".sx":
+ return &p.SFiles
+ case ".swig":
+ return &p.SwigFiles
+ case ".swigcxx":
+ return &p.SwigCXXFiles
+ case ".syso":
+ return &p.SysoFiles
+ }
+ return nil
+}
+
+func uniq(list []string) []string {
+ if list == nil {
+ return nil
+ }
+ out := make([]string, len(list))
+ copy(out, list)
+ sort.Strings(out)
+ uniq := out[:0]
+ for _, x := range out {
+ if len(uniq) == 0 || uniq[len(uniq)-1] != x {
+ uniq = append(uniq, x)
+ }
+ }
+ return uniq
+}
+
+var errNoModules = errors.New("not using modules")
+
+// importGo checks whether it can use the go command to find the directory for path.
+// If using the go command is not appropriate, importGo returns errNoModules.
+// Otherwise, importGo tries using the go command and reports whether that succeeded.
+// Using the go command lets build.Import and build.Context.Import find code
+// in Go modules. In the long term we want tools to use go/packages (currently golang.org/x/tools/go/packages),
+// which will also use the go command.
+// Invoking the go command here is not very efficient in that it computes information
+// about the requested package and all dependencies and then only reports about the requested package.
+// Then we reinvoke it for every dependency. But this is still better than not working at all.
+// See golang.org/issue/26504.
+func (ctxt *Context) importGo(p *Package, path, srcDir string, mode ImportMode) error {
+ // To invoke the go command,
+// we must not be doing special things like AllowBinary or IgnoreVendor,
+ // and all the file system callbacks must be nil (we're meant to use the local file system).
+ if mode&AllowBinary != 0 || mode&IgnoreVendor != 0 ||
+ ctxt.JoinPath != nil || ctxt.SplitPathList != nil || ctxt.IsAbsPath != nil || ctxt.IsDir != nil || ctxt.HasSubdir != nil || ctxt.ReadDir != nil || ctxt.OpenFile != nil || !equal(ctxt.ReleaseTags, defaultReleaseTags) {
+ return errNoModules
+ }
+
+ // Predict whether module aware mode is enabled by checking the value of
+ // GO111MODULE and looking for a go.mod file in the source directory or
+ // one of its parents. Running 'go env GOMOD' in the source directory would
+ // give a canonical answer, but we'd prefer not to execute another command.
+ go111Module := os.Getenv("GO111MODULE")
+ switch go111Module {
+ case "off":
+ return errNoModules
+ default: // "", "on", "auto", anything else
+ // Maybe use modules.
+ }
+
+ if srcDir != "" {
+ var absSrcDir string
+ if filepath.IsAbs(srcDir) {
+ absSrcDir = srcDir
+ } else if ctxt.Dir != "" {
+ return fmt.Errorf("go/build: Dir is non-empty, so relative srcDir is not allowed: %v", srcDir)
+ } else {
+ // Find the absolute source directory. hasSubdir does not handle
+ // relative paths (and can't because the callbacks don't support this).
+ var err error
+ absSrcDir, err = filepath.Abs(srcDir)
+ if err != nil {
+ return errNoModules
+ }
+ }
+
+ // If the source directory is in GOROOT, then the in-process code works fine
+ // and we should keep using it. Moreover, the 'go list' approach below doesn't
+ // take standard-library vendoring into account and will fail.
+ if _, ok := ctxt.hasSubdir(filepath.Join(ctxt.GOROOT, "src"), absSrcDir); ok {
+ return errNoModules
+ }
+ }
+
+ // For efficiency, if path is a standard library package, let the usual lookup code handle it.
+ if ctxt.GOROOT != "" {
+ dir := ctxt.joinPath(ctxt.GOROOT, "src", path)
+ if ctxt.isDir(dir) {
+ return errNoModules
+ }
+ }
+
+ // If GO111MODULE=auto, look to see if there is a go.mod.
+ // Since go1.13, it doesn't matter if we're inside GOPATH.
+ if go111Module == "auto" {
+ var (
+ parent string
+ err error
+ )
+ if ctxt.Dir == "" {
+ parent, err = os.Getwd()
+ if err != nil {
+ // A nonexistent working directory can't be in a module.
+ return errNoModules
+ }
+ } else {
+ parent, err = filepath.Abs(ctxt.Dir)
+ if err != nil {
+ // If the caller passed a bogus Dir explicitly, that's materially
+ // different from not having modules enabled.
+ return err
+ }
+ }
+ for {
+ if f, err := ctxt.openFile(ctxt.joinPath(parent, "go.mod")); err == nil {
+ buf := make([]byte, 100)
+ _, err := f.Read(buf)
+ f.Close()
+ if err == nil || err == io.EOF {
+ // go.mod exists and is readable (is a file, not a directory).
+ break
+ }
+ }
+ d := filepath.Dir(parent)
+ if len(d) >= len(parent) {
+ return errNoModules // reached top of file system, no go.mod
+ }
+ parent = d
+ }
+ }
+
+ cmd := exec.Command("go", "list", "-e", "-compiler="+ctxt.Compiler, "-tags="+strings.Join(ctxt.BuildTags, ","), "-installsuffix="+ctxt.InstallSuffix, "-f={{.Dir}}\n{{.ImportPath}}\n{{.Root}}\n{{.Goroot}}\n{{if .Error}}{{.Error}}{{end}}\n", "--", path)
+
+ if ctxt.Dir != "" {
+ cmd.Dir = ctxt.Dir
+ }
+
+ var stdout, stderr strings.Builder
+ cmd.Stdout = &stdout
+ cmd.Stderr = &stderr
+
+ cgo := "0"
+ if ctxt.CgoEnabled {
+ cgo = "1"
+ }
+ cmd.Env = append(os.Environ(),
+ "GOOS="+ctxt.GOOS,
+ "GOARCH="+ctxt.GOARCH,
+ "GOROOT="+ctxt.GOROOT,
+ "GOPATH="+ctxt.GOPATH,
+ "CGO_ENABLED="+cgo,
+ )
+
+ if err := cmd.Run(); err != nil {
+ return fmt.Errorf("go/build: go list %s: %v\n%s\n", path, err, stderr.String())
+ }
+
+ f := strings.SplitN(stdout.String(), "\n", 5)
+ if len(f) != 5 {
+ return fmt.Errorf("go/build: importGo %s: unexpected output:\n%s\n", path, stdout.String())
+ }
+ dir := f[0]
+ errStr := strings.TrimSpace(f[4])
+ if errStr != "" && dir == "" {
+ // If 'go list' could not locate the package (dir is empty),
+ // return the same error that 'go list' reported.
+ return errors.New(errStr)
+ }
+
+ // If 'go list' did locate the package, ignore the error.
+ // It was probably related to loading source files, and we'll
+ // encounter it ourselves shortly if the FindOnly flag isn't set.
+ p.Dir = dir
+ p.ImportPath = f[1]
+ p.Root = f[2]
+ p.Goroot = f[3] == "true"
+ return nil
+}
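
A hedged sketch of the module-aware path through importGo: with the default hooks and tags and a Dir pointing inside a module, Import defers to 'go list' for anything outside GOROOT. The directory and module path below are hypothetical, and 'go list' must be able to resolve the import within that module.

package main

import (
	"fmt"
	"go/build"
)

func main() {
	ctxt := build.Default
	ctxt.Dir = "/home/user/project" // hypothetical module root containing go.mod
	p, err := ctxt.Import("golang.org/x/mod/modfile", ctxt.Dir, build.FindOnly)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(p.Dir, p.Root)
}
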
+
+func equal(x, y []string) bool {
+ if len(x) != len(y) {
+ return false
+ }
+ for i, xi := range x {
+ if xi != y[i] {
+ return false
+ }
+ }
+ return true
+}
+
+// hasGoFiles reports whether dir contains any files with names ending in .go.
+// For a vendor check we must exclude directories that contain no .go files.
+// Otherwise it is not possible to vendor just a/b/c and still import the
+// non-vendored a/b. See golang.org/issue/13832.
+func hasGoFiles(ctxt *Context, dir string) bool {
+ ents, _ := ctxt.readDir(dir)
+ for _, ent := range ents {
+ if !ent.IsDir() && strings.HasSuffix(ent.Name(), ".go") {
+ return true
+ }
+ }
+ return false
+}
+
+func findImportComment(data []byte) (s string, line int) {
+ // expect keyword package
+ word, data := parseWord(data)
+ if string(word) != "package" {
+ return "", 0
+ }
+
+ // expect package name
+ _, data = parseWord(data)
+
+ // now ready for import comment, a // or /* */ comment
+ // beginning and ending on the current line.
+ for len(data) > 0 && (data[0] == ' ' || data[0] == '\t' || data[0] == '\r') {
+ data = data[1:]
+ }
+
+ var comment []byte
+ switch {
+ case bytes.HasPrefix(data, slashSlash):
+ i := bytes.Index(data, newline)
+ if i < 0 {
+ i = len(data)
+ }
+ comment = data[2:i]
+ case bytes.HasPrefix(data, slashStar):
+ data = data[2:]
+ i := bytes.Index(data, starSlash)
+ if i < 0 {
+ // malformed comment
+ return "", 0
+ }
+ comment = data[:i]
+ if bytes.Contains(comment, newline) {
+ return "", 0
+ }
+ }
+ comment = bytes.TrimSpace(comment)
+
+ // split comment into `import`, `"pkg"`
+ word, arg := parseWord(comment)
+ if string(word) != "import" {
+ return "", 0
+ }
+
+ line = 1 + bytes.Count(data[:cap(data)-cap(arg)], newline)
+ return strings.TrimSpace(string(arg)), line
+}
+
+var (
+ slashSlash = []byte("//")
+ slashStar = []byte("/*")
+ starSlash = []byte("*/")
+ newline = []byte("\n")
+)
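
A runnable sketch of the import comment that findImportComment parses: a throwaway package whose package clause carries an import comment, read back with the ImportComment flag (the "example.com/foo" path is made up):

package main

import (
	"fmt"
	"go/build"
	"os"
	"path/filepath"
)

func main() {
	dir, err := os.MkdirTemp("", "importcomment")
	if err != nil {
		fmt.Println(err)
		return
	}
	defer os.RemoveAll(dir)

	src := "package foo // import \"example.com/foo\"\n" // made-up canonical path
	if err := os.WriteFile(filepath.Join(dir, "foo.go"), []byte(src), 0666); err != nil {
		fmt.Println(err)
		return
	}

	p, err := build.ImportDir(dir, build.ImportComment)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(p.Name, p.ImportComment) // foo example.com/foo
}
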
+
+// skipSpaceOrComment returns data with any leading spaces or comments removed.
+func skipSpaceOrComment(data []byte) []byte {
+ for len(data) > 0 {
+ switch data[0] {
+ case ' ', '\t', '\r', '\n':
+ data = data[1:]
+ continue
+ case '/':
+ if bytes.HasPrefix(data, slashSlash) {
+ i := bytes.Index(data, newline)
+ if i < 0 {
+ return nil
+ }
+ data = data[i+1:]
+ continue
+ }
+ if bytes.HasPrefix(data, slashStar) {
+ data = data[2:]
+ i := bytes.Index(data, starSlash)
+ if i < 0 {
+ return nil
+ }
+ data = data[i+2:]
+ continue
+ }
+ }
+ break
+ }
+ return data
+}
+
+// parseWord skips any leading spaces or comments in data
+// and then parses the beginning of data as an identifier or keyword,
+// returning that word and what remains after the word.
+func parseWord(data []byte) (word, rest []byte) {
+ data = skipSpaceOrComment(data)
+
+ // Parse past leading word characters.
+ rest = data
+ for {
+ r, size := utf8.DecodeRune(rest)
+ if unicode.IsLetter(r) || '0' <= r && r <= '9' || r == '_' {
+ rest = rest[size:]
+ continue
+ }
+ break
+ }
+
+ word = data[:len(data)-len(rest)]
+ if len(word) == 0 {
+ return nil, nil
+ }
+
+ return word, rest
+}
+
+// MatchFile reports whether the file with the given name in the given directory
+// matches the context and would be included in a Package created by ImportDir
+// of that directory.
+//
+// MatchFile considers the name of the file and may use ctxt.OpenFile to
+// read some or all of the file's content.
+func (ctxt *Context) MatchFile(dir, name string) (match bool, err error) {
+ info, err := ctxt.matchFile(dir, name, nil, nil, nil)
+ return info != nil, err
+}
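
A sketch exercising MatchFile against a temporary directory: one file is rejected by a +build line, one by its _GOOS file-name suffix, and one is accepted. The file names and the "ignore" tag are illustrative.

package main

import (
	"fmt"
	"go/build"
	"os"
	"path/filepath"
)

func main() {
	dir, err := os.MkdirTemp("", "matchfile")
	if err != nil {
		fmt.Println(err)
		return
	}
	defer os.RemoveAll(dir)

	files := map[string]string{
		"a.go":         "// +build ignore\n\npackage p\n", // unsatisfied build tag
		"b_windows.go": "package p\n",                     // GOOS suffix below does not match
		"c.go":         "package p\n",                     // no constraints
	}
	for name, src := range files {
		if err := os.WriteFile(filepath.Join(dir, name), []byte(src), 0666); err != nil {
			fmt.Println(err)
			return
		}
	}

	ctxt := build.Default
	ctxt.GOOS, ctxt.GOARCH = "linux", "amd64"
	for _, name := range []string{"a.go", "b_windows.go", "c.go"} {
		ok, err := ctxt.MatchFile(dir, name)
		fmt.Println(name, ok, err) // a.go false, b_windows.go false, c.go true
	}
}
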
+
+var dummyPkg Package
+
+// fileInfo records information learned about a file included in a build.
+type fileInfo struct {
+ name string // full name including dir
+ header []byte
+ fset *token.FileSet
+ parsed *ast.File
+ parseErr error
+ imports []fileImport
+ embeds []fileEmbed
+ embedErr error
+}
+
+type fileImport struct {
+ path string
+ pos token.Pos
+ doc *ast.CommentGroup
+}
+
+type fileEmbed struct {
+ pattern string
+ pos token.Position
+}
+
+// matchFile determines whether the file with the given name in the given directory
+// should be included in the package being constructed.
+// If the file should be included, matchFile returns a non-nil *fileInfo (and a nil error).
+// Non-nil errors are reserved for unexpected problems.
+//
+// If name denotes a Go program, matchFile reads until the end of the
+// imports and returns that section of the file in the fileInfo's header field,
+// even though, when evaluating +build lines, it only considers text
+// up to the first non-comment.
+//
+// If allTags is non-nil, matchFile records any encountered build tag
+// by setting allTags[tag] = true.
+func (ctxt *Context) matchFile(dir, name string, allTags map[string]bool, binaryOnly *bool, fset *token.FileSet) (*fileInfo, error) {
+ if strings.HasPrefix(name, "_") ||
+ strings.HasPrefix(name, ".") {
+ return nil, nil
+ }
+
+ i := strings.LastIndex(name, ".")
+ if i < 0 {
+ i = len(name)
+ }
+ ext := name[i:]
+
+ if !ctxt.goodOSArchFile(name, allTags) && !ctxt.UseAllFiles {
+ return nil, nil
+ }
+
+ if ext != ".go" && fileListForExt(&dummyPkg, ext) == nil {
+ // skip
+ return nil, nil
+ }
+
+ info := &fileInfo{name: ctxt.joinPath(dir, name), fset: fset}
+ if ext == ".syso" {
+ // binary, no reading
+ return info, nil
+ }
+
+ f, err := ctxt.openFile(info.name)
+ if err != nil {
+ return nil, err
+ }
+
+ if strings.HasSuffix(name, ".go") {
+ err = readGoInfo(f, info)
+ if strings.HasSuffix(name, "_test.go") {
+ binaryOnly = nil // ignore //go:binary-only-package comments in _test.go files
+ }
+ } else {
+ binaryOnly = nil // ignore //go:binary-only-package comments in non-Go sources
+ info.header, err = readComments(f)
+ }
+ f.Close()
+ if err != nil {
+ return nil, fmt.Errorf("read %s: %v", info.name, err)
+ }
+
+ // Look for +build comments to accept or reject the file.
+ ok, sawBinaryOnly, err := ctxt.shouldBuild(info.header, allTags)
+ if err != nil {
+ return nil, err
+ }
+ if !ok && !ctxt.UseAllFiles {
+ return nil, nil
+ }
+
+ if binaryOnly != nil && sawBinaryOnly {
+ *binaryOnly = true
+ }
+
+ return info, nil
+}
+
+func cleanDecls(m map[string][]token.Position) ([]string, map[string][]token.Position) {
+ all := make([]string, 0, len(m))
+ for path := range m {
+ all = append(all, path)
+ }
+ sort.Strings(all)
+ return all, m
+}
+
+// Import is shorthand for Default.Import.
+func Import(path, srcDir string, mode ImportMode) (*Package, error) {
+ return Default.Import(path, srcDir, mode)
+}
+
+// ImportDir is shorthand for Default.ImportDir.
+func ImportDir(dir string, mode ImportMode) (*Package, error) {
+ return Default.ImportDir(dir, mode)
+}
+
+var (
+ bSlashSlash = []byte(slashSlash)
+ bStarSlash = []byte(starSlash)
+ bSlashStar = []byte(slashStar)
+
+ goBuildComment = []byte("//go:build")
+
+ errGoBuildWithoutBuild = errors.New("//go:build comment without // +build comment")
+ errMultipleGoBuild = errors.New("multiple //go:build comments") // unused in Go 1.(N-1)
+)
+
+func isGoBuildComment(line []byte) bool {
+ if !bytes.HasPrefix(line, goBuildComment) {
+ return false
+ }
+ line = bytes.TrimSpace(line)
+ rest := line[len(goBuildComment):]
+ return len(rest) == 0 || len(bytes.TrimSpace(rest)) < len(rest)
+}
+
+// Special comment denoting a binary-only package.
+// See https://golang.org/design/2775-binary-only-packages
+// for more about the design of binary-only packages.
+var binaryOnlyComment = []byte("//go:binary-only-package")
+
+// shouldBuild reports whether it is okay to use this file.
+// The rule is that in the file's leading run of // comments
+// and blank lines, which must be followed by a blank line
+// (to avoid including a Go package clause doc comment),
+// lines beginning with '// +build' are taken as build directives.
+//
+// The file is accepted only if each such line lists something
+// matching the file. For example:
+//
+// // +build windows linux
+//
+// marks the file as applicable only on Windows and Linux.
+//
+// For each build tag it consults, shouldBuild sets allTags[tag] = true.
+//
+// shouldBuild reports whether the file should be built
+// and whether a //go:binary-only-package comment was found.
+func (ctxt *Context) shouldBuild(content []byte, allTags map[string]bool) (shouldBuild, binaryOnly bool, err error) {
+
+ // Pass 1. Identify leading run of // comments and blank lines,
+ // which must be followed by a blank line.
+ // Also identify any //go:build comments.
+ content, goBuild, sawBinaryOnly, err := parseFileHeader(content)
+ if err != nil {
+ return false, false, err
+ }
+
+ // Pass 2. Process each +build line in the run.
+ p := content
+ shouldBuild = true
+ sawBuild := false
+ for len(p) > 0 {
+ line := p
+ if i := bytes.IndexByte(line, '\n'); i >= 0 {
+ line, p = line[:i], p[i+1:]
+ } else {
+ p = p[len(p):]
+ }
+ line = bytes.TrimSpace(line)
+ if !bytes.HasPrefix(line, bSlashSlash) {
+ continue
+ }
+ line = bytes.TrimSpace(line[len(bSlashSlash):])
+ if len(line) > 0 && line[0] == '+' {
+ // Looks like a comment +build line.
+ f := strings.Fields(string(line))
+ if f[0] == "+build" {
+ sawBuild = true
+ ok := false
+ for _, tok := range f[1:] {
+ if ctxt.match(tok, allTags) {
+ ok = true
+ }
+ }
+ if !ok {
+ shouldBuild = false
+ }
+ }
+ }
+ }
+
+ if goBuild != nil && !sawBuild {
+ return false, false, errGoBuildWithoutBuild
+ }
+
+ return shouldBuild, sawBinaryOnly, nil
+}
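
To illustrate the blank-line rule above (an editor's sketch, not taken from this file): in the first header the +build line is followed by a blank line and therefore constrains the file,

// +build linux darwin

package p

while in the second there is no blank line after it, so the line is treated as ordinary comment text and the file builds everywhere:

// +build linux darwin
package p
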
+
+func parseFileHeader(content []byte) (trimmed, goBuild []byte, sawBinaryOnly bool, err error) {
+ end := 0
+ p := content
+ ended := false // found non-blank, non-// line, so stopped accepting // +build lines
+ inSlashStar := false // in /* */ comment
+
+Lines:
+ for len(p) > 0 {
+ line := p
+ if i := bytes.IndexByte(line, '\n'); i >= 0 {
+ line, p = line[:i], p[i+1:]
+ } else {
+ p = p[len(p):]
+ }
+ line = bytes.TrimSpace(line)
+ if len(line) == 0 && !ended { // Blank line
+ // Remember position of most recent blank line.
+ // When we find the first non-blank, non-// line,
+ // this "end" position marks the latest file position
+ // where a // +build line can appear.
+ // (It must appear _before_ a blank line before the non-blank, non-// line.
+ // Yes, that's confusing, which is part of why we moved to //go:build lines.)
+ // Note that ended==false here means that inSlashStar==false,
+ // since seeing a /* would have set ended==true.
+ end = len(content) - len(p)
+ continue Lines
+ }
+ if !bytes.HasPrefix(line, slashSlash) { // Not comment line
+ ended = true
+ }
+
+ if !inSlashStar && isGoBuildComment(line) {
+ if false && goBuild != nil { // enabled in Go 1.N
+ return nil, nil, false, errMultipleGoBuild
+ }
+ goBuild = line
+ }
+ if !inSlashStar && bytes.Equal(line, binaryOnlyComment) {
+ sawBinaryOnly = true
+ }
+
+ Comments:
+ for len(line) > 0 {
+ if inSlashStar {
+ if i := bytes.Index(line, starSlash); i >= 0 {
+ inSlashStar = false
+ line = bytes.TrimSpace(line[i+len(starSlash):])
+ continue Comments
+ }
+ continue Lines
+ }
+ if bytes.HasPrefix(line, bSlashSlash) {
+ continue Lines
+ }
+ if bytes.HasPrefix(line, bSlashStar) {
+ inSlashStar = true
+ line = bytes.TrimSpace(line[len(bSlashStar):])
+ continue Comments
+ }
+ // Found non-comment text.
+ break Lines
+ }
+ }
+
+ return content[:end], goBuild, sawBinaryOnly, nil
+}
+
+// saveCgo saves the information from the #cgo lines in the import "C" comment.
+// These lines set CFLAGS, CPPFLAGS, CXXFLAGS and LDFLAGS and pkg-config directives
+// that affect the way cgo's C code is built.
+func (ctxt *Context) saveCgo(filename string, di *Package, cg *ast.CommentGroup) error {
+ text := cg.Text()
+ for _, line := range strings.Split(text, "\n") {
+ orig := line
+
+ // Line is
+ // #cgo [GOOS/GOARCH...] LDFLAGS: stuff
+ //
+ line = strings.TrimSpace(line)
+ if len(line) < 5 || line[:4] != "#cgo" || (line[4] != ' ' && line[4] != '\t') {
+ continue
+ }
+
+ // Split at colon.
+ line = strings.TrimSpace(line[4:])
+ i := strings.Index(line, ":")
+ if i < 0 {
+ return fmt.Errorf("%s: invalid #cgo line: %s", filename, orig)
+ }
+ line, argstr := line[:i], line[i+1:]
+
+ // Parse GOOS/GOARCH stuff.
+ f := strings.Fields(line)
+ if len(f) < 1 {
+ return fmt.Errorf("%s: invalid #cgo line: %s", filename, orig)
+ }
+
+ cond, verb := f[:len(f)-1], f[len(f)-1]
+ if len(cond) > 0 {
+ ok := false
+ for _, c := range cond {
+ if ctxt.match(c, nil) {
+ ok = true
+ break
+ }
+ }
+ if !ok {
+ continue
+ }
+ }
+
+ args, err := splitQuoted(argstr)
+ if err != nil {
+ return fmt.Errorf("%s: invalid #cgo line: %s", filename, orig)
+ }
+ var ok bool
+ for i, arg := range args {
+ if arg, ok = expandSrcDir(arg, di.Dir); !ok {
+ return fmt.Errorf("%s: malformed #cgo argument: %s", filename, arg)
+ }
+ args[i] = arg
+ }
+
+ switch verb {
+ case "CFLAGS", "CPPFLAGS", "CXXFLAGS", "FFLAGS", "LDFLAGS":
+ // Change relative paths to absolute.
+ ctxt.makePathsAbsolute(args, di.Dir)
+ }
+
+ switch verb {
+ case "CFLAGS":
+ di.CgoCFLAGS = append(di.CgoCFLAGS, args...)
+ case "CPPFLAGS":
+ di.CgoCPPFLAGS = append(di.CgoCPPFLAGS, args...)
+ case "CXXFLAGS":
+ di.CgoCXXFLAGS = append(di.CgoCXXFLAGS, args...)
+ case "FFLAGS":
+ di.CgoFFLAGS = append(di.CgoFFLAGS, args...)
+ case "LDFLAGS":
+ di.CgoLDFLAGS = append(di.CgoLDFLAGS, args...)
+ case "pkg-config":
+ di.CgoPkgConfig = append(di.CgoPkgConfig, args...)
+ default:
+ return fmt.Errorf("%s: invalid #cgo verb: %s", filename, orig)
+ }
+ }
+ return nil
+}
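
For reference, the kind of cgo preamble these #cgo lines come from looks like the sketch below; the package, header, and library names are placeholders. After saveCgo runs, the CFLAGS value ends up in CgoCFLAGS with ${SRCDIR} expanded, the linux-only LDFLAGS entry is kept or dropped according to match, and the pkg-config name is appended to CgoPkgConfig.

package stats // hypothetical package using cgo

// #cgo CFLAGS: -I${SRCDIR}/include
// #cgo linux LDFLAGS: -lm
// #cgo pkg-config: zlib
// #include "stats.h"
import "C"
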
+
+// expandSrcDir expands any occurrence of ${SRCDIR}, making sure
+// the result is safe for the shell.
+func expandSrcDir(str string, srcdir string) (string, bool) {
+ // "\" delimited paths cause safeCgoName to fail
+ // so convert native paths with a different delimiter
+ // to "/" before starting (eg: on windows).
+ srcdir = filepath.ToSlash(srcdir)
+
+ chunks := strings.Split(str, "${SRCDIR}")
+ if len(chunks) < 2 {
+ return str, safeCgoName(str)
+ }
+ ok := true
+ for _, chunk := range chunks {
+ ok = ok && (chunk == "" || safeCgoName(chunk))
+ }
+ ok = ok && (srcdir == "" || safeCgoName(srcdir))
+ res := strings.Join(chunks, srcdir)
+ return res, ok && res != ""
+}
+
+// makePathsAbsolute looks for compiler options that take paths and
+// makes them absolute. We do this because through the 1.8 release we
+// ran the compiler in the package directory, so any relative -I or -L
+// options would be relative to that directory. In 1.9 we changed to
+// running the compiler in the build directory, to get consistent
+// build results (issue #19964). To keep builds working, we change any
+// relative -I or -L options to be absolute.
+//
+// Using filepath.IsAbs and filepath.Join here means the results will be
+// different on different systems, but that's OK: -I and -L options are
+// inherently system-dependent.
+func (ctxt *Context) makePathsAbsolute(args []string, srcDir string) {
+ nextPath := false
+ for i, arg := range args {
+ if nextPath {
+ if !filepath.IsAbs(arg) {
+ args[i] = filepath.Join(srcDir, arg)
+ }
+ nextPath = false
+ } else if strings.HasPrefix(arg, "-I") || strings.HasPrefix(arg, "-L") {
+ if len(arg) == 2 {
+ nextPath = true
+ } else {
+ if !filepath.IsAbs(arg[2:]) {
+ args[i] = arg[:2] + filepath.Join(srcDir, arg[2:])
+ }
+ }
+ }
+ }
+}
+
+// NOTE: $ is not safe for the shell, but it is allowed here because of linker options like -Wl,$ORIGIN.
+// We never pass these arguments to a shell (just to programs we construct argv for), so this should be okay.
+// See golang.org/issue/6038.
+// The @ is for OS X. See golang.org/issue/13720.
+// The % is for Jenkins. See golang.org/issue/16959.
+// The ! is because module paths may use it. See golang.org/issue/26716.
+// The ~ and ^ are for sr.ht. See golang.org/issue/32260.
+const safeString = "+-.,/0123456789=ABCDEFGHIJKLMNOPQRSTUVWXYZ_abcdefghijklmnopqrstuvwxyz:$@%! ~^"
+
+func safeCgoName(s string) bool {
+ if s == "" {
+ return false
+ }
+ for i := 0; i < len(s); i++ {
+ if c := s[i]; c < utf8.RuneSelf && strings.IndexByte(safeString, c) < 0 {
+ return false
+ }
+ }
+ return true
+}
+
+// splitQuoted splits the string s around each instance of one or more consecutive
+// white space characters while taking into account quotes and escaping, and
+// returns an array of substrings of s or an empty list if s contains only white space.
+// Single quotes and double quotes are recognized to prevent splitting within the
+// quoted region, and are removed from the resulting substrings. If a quote in s
+// isn't closed err will be set and r will have the unclosed argument as the
+// last element. The backslash is used for escaping.
+//
+// For example, the following string:
+//
+// a b:"c d" 'e''f' "g\""
+//
+// Would be parsed as:
+//
+// []string{"a", "b:c d", "ef", `g"`}
+//
+func splitQuoted(s string) (r []string, err error) {
+ var args []string
+ arg := make([]rune, len(s))
+ escaped := false
+ quoted := false
+ quote := '\x00'
+ i := 0
+ for _, rune := range s {
+ switch {
+ case escaped:
+ escaped = false
+ case rune == '\\':
+ escaped = true
+ continue
+ case quote != '\x00':
+ if rune == quote {
+ quote = '\x00'
+ continue
+ }
+ case rune == '"' || rune == '\'':
+ quoted = true
+ quote = rune
+ continue
+ case unicode.IsSpace(rune):
+ if quoted || i > 0 {
+ quoted = false
+ args = append(args, string(arg[:i]))
+ i = 0
+ }
+ continue
+ }
+ arg[i] = rune
+ i++
+ }
+ if quoted || i > 0 {
+ args = append(args, string(arg[:i]))
+ }
+ if quote != 0 {
+ err = errors.New("unclosed quote")
+ } else if escaped {
+ err = errors.New("unfinished escaping")
+ }
+ return args, err
+}
+
+// match reports whether the name is one of:
+//
+// $GOOS
+// $GOARCH
+// cgo (if cgo is enabled)
+// !cgo (if cgo is disabled)
+// ctxt.Compiler
+// !ctxt.Compiler
+// tag (if tag is listed in ctxt.BuildTags or ctxt.ReleaseTags)
+// !tag (if tag is not listed in ctxt.BuildTags or ctxt.ReleaseTags)
+// a comma-separated list of any of these
+//
+func (ctxt *Context) match(name string, allTags map[string]bool) bool {
+ if name == "" {
+ if allTags != nil {
+ allTags[name] = true
+ }
+ return false
+ }
+ if i := strings.Index(name, ","); i >= 0 {
+ // comma-separated list
+ ok1 := ctxt.match(name[:i], allTags)
+ ok2 := ctxt.match(name[i+1:], allTags)
+ return ok1 && ok2
+ }
+ if strings.HasPrefix(name, "!!") { // bad syntax, reject always
+ return false
+ }
+ if strings.HasPrefix(name, "!") { // negation
+ return len(name) > 1 && !ctxt.match(name[1:], allTags)
+ }
+
+ if allTags != nil {
+ allTags[name] = true
+ }
+
+ // Tags must be letters, digits, underscores or dots.
+ // Unlike in Go identifiers, all digits are fine (e.g., "386").
+ for _, c := range name {
+ if !unicode.IsLetter(c) && !unicode.IsDigit(c) && c != '_' && c != '.' {
+ return false
+ }
+ }
+
+ // special tags
+ if ctxt.CgoEnabled && name == "cgo" {
+ return true
+ }
+ if name == ctxt.GOOS || name == ctxt.GOARCH || name == ctxt.Compiler {
+ return true
+ }
+ if ctxt.GOOS == "android" && name == "linux" {
+ return true
+ }
+ if ctxt.GOOS == "illumos" && name == "solaris" {
+ return true
+ }
+ if ctxt.GOOS == "ios" && name == "darwin" {
+ return true
+ }
+
+ // other tags
+ for _, tag := range ctxt.BuildTags {
+ if tag == name {
+ return true
+ }
+ }
+ for _, tag := range ctxt.ReleaseTags {
+ if tag == name {
+ return true
+ }
+ }
+
+ return false
+}
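+
+// A hand-worked illustration (context values assumed): with GOOS=linux, GOARCH=amd64,
+// and CgoEnabled=false,
+//
+// ctxt.match("linux,amd64", nil)  // true: both terms are satisfied
+// ctxt.match("!cgo", nil)         // true: cgo is disabled
+// ctxt.match("darwin,amd64", nil) // false: the GOOS term fails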
+
+// goodOSArchFile returns false if the name contains a $GOOS or $GOARCH
+// suffix which does not match the current system.
+// The recognized name formats are:
+//
+// name_$(GOOS).*
+// name_$(GOARCH).*
+// name_$(GOOS)_$(GOARCH).*
+// name_$(GOOS)_test.*
+// name_$(GOARCH)_test.*
+// name_$(GOOS)_$(GOARCH)_test.*
+//
+// Exceptions:
+// if GOOS=android, then files with GOOS=linux are also matched.
+// if GOOS=illumos, then files with GOOS=solaris are also matched.
+// if GOOS=ios, then files with GOOS=darwin are also matched.
+func (ctxt *Context) goodOSArchFile(name string, allTags map[string]bool) bool {
+ if dot := strings.Index(name, "."); dot != -1 {
+ name = name[:dot]
+ }
+
+ // Before Go 1.4, a file called "linux.go" would be equivalent to having a
+ // build tag "linux" in that file. For Go 1.4 and beyond, we require this
+ // auto-tagging to apply only to files with a non-empty prefix, so
+ // "foo_linux.go" is tagged but "linux.go" is not. This allows new operating
+ // systems, such as android, to arrive without breaking existing code with
+ // innocuous source code in "android.go". The easiest fix: cut everything
+ // in the name before the initial _.
+ i := strings.Index(name, "_")
+ if i < 0 {
+ return true
+ }
+ name = name[i:] // ignore everything before first _
+
+ l := strings.Split(name, "_")
+ if n := len(l); n > 0 && l[n-1] == "test" {
+ l = l[:n-1]
+ }
+ n := len(l)
+ if n >= 2 && knownOS[l[n-2]] && knownArch[l[n-1]] {
+ return ctxt.match(l[n-1], allTags) && ctxt.match(l[n-2], allTags)
+ }
+ if n >= 1 && (knownOS[l[n-1]] || knownArch[l[n-1]]) {
+ return ctxt.match(l[n-1], allTags)
+ }
+ return true
+}
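+
+// Worked examples (GOOS and GOARCH values assumed): with GOOS=linux and GOARCH=amd64,
+// "x_linux.go" and "x_linux_amd64.go" are accepted, "x_windows_amd64.go" is rejected
+// because its GOOS part does not match, and "linux.go" is accepted because a name
+// with no "_" carries no implicit constraint.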
+
+var knownOS = make(map[string]bool)
+var knownArch = make(map[string]bool)
+
+func init() {
+ for _, v := range strings.Fields(goosList) {
+ knownOS[v] = true
+ }
+ for _, v := range strings.Fields(goarchList) {
+ knownArch[v] = true
+ }
+}
+
+// ToolDir is the directory containing build tools.
+var ToolDir = getToolDir()
+
+// IsLocalImport reports whether the import path is
+// a local import path, like ".", "..", "./foo", or "../foo".
+func IsLocalImport(path string) bool {
+ return path == "." || path == ".." ||
+ strings.HasPrefix(path, "./") || strings.HasPrefix(path, "../")
+}
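+
+// For instance, IsLocalImport("./foo") and IsLocalImport("..") report true,
+// while IsLocalImport("fmt") and IsLocalImport("example.com/x") report false.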
+
+// ArchChar returns "?" and an error.
+// In earlier versions of Go, the returned string was used to derive
+// the compiler and linker tool names, the default object file suffix,
+// and the default linker output name. As of Go 1.5, those strings
+// no longer vary by architecture; they are compile, link, .o, and a.out, respectively.
+func ArchChar(goarch string) (string, error) {
+ return "?", errors.New("architecture letter no longer used")
+}
diff --git a/src/go/build/build_test.go b/src/go/build/build_test.go
new file mode 100644
index 0000000..d13ea81
--- /dev/null
+++ b/src/go/build/build_test.go
@@ -0,0 +1,712 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package build
+
+import (
+ "flag"
+ "internal/testenv"
+ "io"
+ "os"
+ "path/filepath"
+ "reflect"
+ "runtime"
+ "strings"
+ "testing"
+)
+
+func TestMain(m *testing.M) {
+ flag.Parse()
+ if goTool, err := testenv.GoTool(); err == nil {
+ os.Setenv("PATH", filepath.Dir(goTool)+string(os.PathListSeparator)+os.Getenv("PATH"))
+ }
+ os.Exit(m.Run())
+}
+
+func TestMatch(t *testing.T) {
+ ctxt := Default
+ what := "default"
+ match := func(tag string, want map[string]bool) {
+ t.Helper()
+ m := make(map[string]bool)
+ if !ctxt.match(tag, m) {
+ t.Errorf("%s context should match %s, does not", what, tag)
+ }
+ if !reflect.DeepEqual(m, want) {
+ t.Errorf("%s tags = %v, want %v", tag, m, want)
+ }
+ }
+ nomatch := func(tag string, want map[string]bool) {
+ t.Helper()
+ m := make(map[string]bool)
+ if ctxt.match(tag, m) {
+ t.Errorf("%s context should NOT match %s, does", what, tag)
+ }
+ if !reflect.DeepEqual(m, want) {
+ t.Errorf("%s tags = %v, want %v", tag, m, want)
+ }
+ }
+
+ match(runtime.GOOS+","+runtime.GOARCH, map[string]bool{runtime.GOOS: true, runtime.GOARCH: true})
+ match(runtime.GOOS+","+runtime.GOARCH+",!foo", map[string]bool{runtime.GOOS: true, runtime.GOARCH: true, "foo": true})
+ nomatch(runtime.GOOS+","+runtime.GOARCH+",foo", map[string]bool{runtime.GOOS: true, runtime.GOARCH: true, "foo": true})
+
+ what = "modified"
+ ctxt.BuildTags = []string{"foo"}
+ match(runtime.GOOS+","+runtime.GOARCH, map[string]bool{runtime.GOOS: true, runtime.GOARCH: true})
+ match(runtime.GOOS+","+runtime.GOARCH+",foo", map[string]bool{runtime.GOOS: true, runtime.GOARCH: true, "foo": true})
+ nomatch(runtime.GOOS+","+runtime.GOARCH+",!foo", map[string]bool{runtime.GOOS: true, runtime.GOARCH: true, "foo": true})
+ match(runtime.GOOS+","+runtime.GOARCH+",!bar", map[string]bool{runtime.GOOS: true, runtime.GOARCH: true, "bar": true})
+ nomatch(runtime.GOOS+","+runtime.GOARCH+",bar", map[string]bool{runtime.GOOS: true, runtime.GOARCH: true, "bar": true})
+}
+
+func TestDotSlashImport(t *testing.T) {
+ p, err := ImportDir("testdata/other", 0)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if len(p.Imports) != 1 || p.Imports[0] != "./file" {
+ t.Fatalf("testdata/other: Imports=%v, want [./file]", p.Imports)
+ }
+
+ p1, err := Import("./file", "testdata/other", 0)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if p1.Name != "file" {
+ t.Fatalf("./file: Name=%q, want %q", p1.Name, "file")
+ }
+ dir := filepath.Clean("testdata/other/file") // Clean to use \ on Windows
+ if p1.Dir != dir {
+ t.Fatalf("./file: Dir=%q, want %q", p1.Name, dir)
+ }
+}
+
+func TestEmptyImport(t *testing.T) {
+ p, err := Import("", Default.GOROOT, FindOnly)
+ if err == nil {
+ t.Fatal(`Import("") returned nil error.`)
+ }
+ if p == nil {
+ t.Fatal(`Import("") returned nil package.`)
+ }
+ if p.ImportPath != "" {
+ t.Fatalf("ImportPath=%q, want %q.", p.ImportPath, "")
+ }
+}
+
+func TestEmptyFolderImport(t *testing.T) {
+ _, err := Import(".", "testdata/empty", 0)
+ if _, ok := err.(*NoGoError); !ok {
+ t.Fatal(`Import("testdata/empty") did not return NoGoError.`)
+ }
+}
+
+func TestMultiplePackageImport(t *testing.T) {
+ _, err := Import(".", "testdata/multi", 0)
+ mpe, ok := err.(*MultiplePackageError)
+ if !ok {
+ t.Fatal(`Import("testdata/multi") did not return MultiplePackageError.`)
+ }
+ want := &MultiplePackageError{
+ Dir: filepath.FromSlash("testdata/multi"),
+ Packages: []string{"main", "test_package"},
+ Files: []string{"file.go", "file_appengine.go"},
+ }
+ if !reflect.DeepEqual(mpe, want) {
+ t.Errorf("got %#v; want %#v", mpe, want)
+ }
+}
+
+func TestLocalDirectory(t *testing.T) {
+ if runtime.GOOS == "ios" {
+ t.Skipf("skipping on %s/%s, no valid GOROOT", runtime.GOOS, runtime.GOARCH)
+ }
+
+ cwd, err := os.Getwd()
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ p, err := ImportDir(cwd, 0)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if p.ImportPath != "go/build" {
+ t.Fatalf("ImportPath=%q, want %q", p.ImportPath, "go/build")
+ }
+}
+
+var shouldBuildTests = []struct {
+ name string
+ content string
+ tags map[string]bool
+ binaryOnly bool
+ shouldBuild bool
+ err error
+}{
+ {
+ name: "Yes",
+ content: "// +build yes\n\n" +
+ "package main\n",
+ tags: map[string]bool{"yes": true},
+ shouldBuild: true,
+ },
+ {
+ name: "Or",
+ content: "// +build no yes\n\n" +
+ "package main\n",
+ tags: map[string]bool{"yes": true, "no": true},
+ shouldBuild: true,
+ },
+ {
+ name: "And",
+ content: "// +build no,yes\n\n" +
+ "package main\n",
+ tags: map[string]bool{"yes": true, "no": true},
+ shouldBuild: false,
+ },
+ {
+ name: "Cgo",
+ content: "// +build cgo\n\n" +
+ "// Copyright The Go Authors.\n\n" +
+ "// This package implements parsing of tags like\n" +
+ "// +build tag1\n" +
+ "package build",
+ tags: map[string]bool{"cgo": true},
+ shouldBuild: false,
+ },
+ {
+ name: "AfterPackage",
+ content: "// Copyright The Go Authors.\n\n" +
+ "package build\n\n" +
+ "// shouldBuild checks tags given by lines of the form\n" +
+ "// +build tag\n" +
+ "func shouldBuild(content []byte)\n",
+ tags: map[string]bool{},
+ shouldBuild: true,
+ },
+ {
+ name: "TooClose",
+ content: "// +build yes\n" +
+ "package main\n",
+ tags: map[string]bool{},
+ shouldBuild: true,
+ },
+ {
+ name: "TooCloseNo",
+ content: "// +build no\n" +
+ "package main\n",
+ tags: map[string]bool{},
+ shouldBuild: true,
+ },
+ {
+ name: "BinaryOnly",
+ content: "//go:binary-only-package\n" +
+ "// +build yes\n" +
+ "package main\n",
+ tags: map[string]bool{},
+ binaryOnly: true,
+ shouldBuild: true,
+ },
+ {
+ name: "ValidGoBuild",
+ content: "// +build yes\n\n" +
+ "//go:build no\n" +
+ "package main\n",
+ tags: map[string]bool{"yes": true},
+ shouldBuild: true,
+ },
+ {
+ name: "MissingBuild",
+ content: "//go:build no\n" +
+ "package main\n",
+ tags: map[string]bool{},
+ shouldBuild: false,
+ err: errGoBuildWithoutBuild,
+ },
+ {
+ name: "MissingBuild2",
+ content: "/* */\n" +
+ "// +build yes\n\n" +
+ "//go:build no\n" +
+ "package main\n",
+ tags: map[string]bool{},
+ shouldBuild: false,
+ err: errGoBuildWithoutBuild,
+ },
+ {
+ name: "MissingBuild2",
+ content: "/*\n" +
+ "// +build yes\n\n" +
+ "*/\n" +
+ "//go:build no\n" +
+ "package main\n",
+ tags: map[string]bool{},
+ shouldBuild: false,
+ err: errGoBuildWithoutBuild,
+ },
+ {
+ name: "Comment1",
+ content: "/*\n" +
+ "//go:build no\n" +
+ "*/\n\n" +
+ "package main\n",
+ tags: map[string]bool{},
+ shouldBuild: true,
+ },
+ {
+ name: "Comment2",
+ content: "/*\n" +
+ "text\n" +
+ "*/\n\n" +
+ "//go:build no\n" +
+ "package main\n",
+ tags: map[string]bool{},
+ shouldBuild: false,
+ err: errGoBuildWithoutBuild,
+ },
+ {
+ name: "Comment3",
+ content: "/*/*/ /* hi *//* \n" +
+ "text\n" +
+ "*/\n\n" +
+ "//go:build no\n" +
+ "package main\n",
+ tags: map[string]bool{},
+ shouldBuild: false,
+ err: errGoBuildWithoutBuild,
+ },
+ {
+ name: "Comment4",
+ content: "/**///go:build no\n" +
+ "package main\n",
+ tags: map[string]bool{},
+ shouldBuild: true,
+ },
+ {
+ name: "Comment5",
+ content: "/**/\n" +
+ "//go:build no\n" +
+ "package main\n",
+ tags: map[string]bool{},
+ shouldBuild: false,
+ err: errGoBuildWithoutBuild,
+ },
+}
+
+func TestShouldBuild(t *testing.T) {
+ for _, tt := range shouldBuildTests {
+ t.Run(tt.name, func(t *testing.T) {
+ ctx := &Context{BuildTags: []string{"yes"}}
+ tags := map[string]bool{}
+ shouldBuild, binaryOnly, err := ctx.shouldBuild([]byte(tt.content), tags)
+ if shouldBuild != tt.shouldBuild || binaryOnly != tt.binaryOnly || !reflect.DeepEqual(tags, tt.tags) || err != tt.err {
+ t.Errorf("mismatch:\n"+
+ "have shouldBuild=%v, binaryOnly=%v, tags=%v, err=%v\n"+
+ "want shouldBuild=%v, binaryOnly=%v, tags=%v, err=%v",
+ shouldBuild, binaryOnly, tags, err,
+ tt.shouldBuild, tt.binaryOnly, tt.tags, tt.err)
+ }
+ })
+ }
+}
+
+func TestGoodOSArchFile(t *testing.T) {
+ ctx := &Context{BuildTags: []string{"linux"}, GOOS: "darwin"}
+ m := map[string]bool{}
+ want := map[string]bool{"linux": true}
+ if !ctx.goodOSArchFile("hello_linux.go", m) {
+ t.Errorf("goodOSArchFile(hello_linux.go) = false, want true")
+ }
+ if !reflect.DeepEqual(m, want) {
+ t.Errorf("goodOSArchFile(hello_linux.go) tags = %v, want %v", m, want)
+ }
+}
+
+type readNopCloser struct {
+ io.Reader
+}
+
+func (r readNopCloser) Close() error {
+ return nil
+}
+
+var (
+ ctxtP9 = Context{GOARCH: "arm", GOOS: "plan9"}
+ ctxtAndroid = Context{GOARCH: "arm", GOOS: "android"}
+)
+
+var matchFileTests = []struct {
+ ctxt Context
+ name string
+ data string
+ match bool
+}{
+ {ctxtP9, "foo_arm.go", "", true},
+ {ctxtP9, "foo1_arm.go", "// +build linux\n\npackage main\n", false},
+ {ctxtP9, "foo_darwin.go", "", false},
+ {ctxtP9, "foo.go", "", true},
+ {ctxtP9, "foo1.go", "// +build linux\n\npackage main\n", false},
+ {ctxtP9, "foo.badsuffix", "", false},
+ {ctxtAndroid, "foo_linux.go", "", true},
+ {ctxtAndroid, "foo_android.go", "", true},
+ {ctxtAndroid, "foo_plan9.go", "", false},
+ {ctxtAndroid, "android.go", "", true},
+ {ctxtAndroid, "plan9.go", "", true},
+ {ctxtAndroid, "plan9_test.go", "", true},
+ {ctxtAndroid, "arm.s", "", true},
+ {ctxtAndroid, "amd64.s", "", true},
+}
+
+func TestMatchFile(t *testing.T) {
+ for _, tt := range matchFileTests {
+ ctxt := tt.ctxt
+ ctxt.OpenFile = func(path string) (r io.ReadCloser, err error) {
+ if path != "x+"+tt.name {
+ t.Fatalf("OpenFile asked for %q, expected %q", path, "x+"+tt.name)
+ }
+ return &readNopCloser{strings.NewReader(tt.data)}, nil
+ }
+ ctxt.JoinPath = func(elem ...string) string {
+ return strings.Join(elem, "+")
+ }
+ match, err := ctxt.MatchFile("x", tt.name)
+ if match != tt.match || err != nil {
+ t.Fatalf("MatchFile(%q) = %v, %v, want %v, nil", tt.name, match, err, tt.match)
+ }
+ }
+}
+
+func TestImportCmd(t *testing.T) {
+ if runtime.GOOS == "ios" {
+ t.Skipf("skipping on %s/%s, no valid GOROOT", runtime.GOOS, runtime.GOARCH)
+ }
+
+ p, err := Import("cmd/internal/objfile", "", 0)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if !strings.HasSuffix(filepath.ToSlash(p.Dir), "src/cmd/internal/objfile") {
+ t.Fatalf("Import cmd/internal/objfile returned Dir=%q, want %q", filepath.ToSlash(p.Dir), ".../src/cmd/internal/objfile")
+ }
+}
+
+var (
+ expandSrcDirPath = filepath.Join(string(filepath.Separator)+"projects", "src", "add")
+)
+
+var expandSrcDirTests = []struct {
+ input, expected string
+}{
+ {"-L ${SRCDIR}/libs -ladd", "-L /projects/src/add/libs -ladd"},
+ {"${SRCDIR}/add_linux_386.a -pthread -lstdc++", "/projects/src/add/add_linux_386.a -pthread -lstdc++"},
+ {"Nothing to expand here!", "Nothing to expand here!"},
+ {"$", "$"},
+ {"$$", "$$"},
+ {"${", "${"},
+ {"$}", "$}"},
+ {"$FOO ${BAR}", "$FOO ${BAR}"},
+ {"Find me the $SRCDIRECTORY.", "Find me the $SRCDIRECTORY."},
+ {"$SRCDIR is missing braces", "$SRCDIR is missing braces"},
+}
+
+func TestExpandSrcDir(t *testing.T) {
+ for _, test := range expandSrcDirTests {
+ output, _ := expandSrcDir(test.input, expandSrcDirPath)
+ if output != test.expected {
+ t.Errorf("%q expands to %q with SRCDIR=%q when %q is expected", test.input, output, expandSrcDirPath, test.expected)
+ } else {
+ t.Logf("%q expands to %q with SRCDIR=%q", test.input, output, expandSrcDirPath)
+ }
+ }
+}
+
+func TestShellSafety(t *testing.T) {
+ tests := []struct {
+ input, srcdir, expected string
+ result bool
+ }{
+ {"-I${SRCDIR}/../include", "/projects/src/issue 11868", "-I/projects/src/issue 11868/../include", true},
+ {"-I${SRCDIR}", "~wtf$@%^", "-I~wtf$@%^", true},
+ {"-X${SRCDIR}/1,${SRCDIR}/2", "/projects/src/issue 11868", "-X/projects/src/issue 11868/1,/projects/src/issue 11868/2", true},
+ {"-I/tmp -I/tmp", "/tmp2", "-I/tmp -I/tmp", true},
+ {"-I/tmp", "/tmp/[0]", "-I/tmp", true},
+ {"-I${SRCDIR}/dir", "/tmp/[0]", "-I/tmp/[0]/dir", false},
+ {"-I${SRCDIR}/dir", "/tmp/go go", "-I/tmp/go go/dir", true},
+ {"-I${SRCDIR}/dir dir", "/tmp/go", "-I/tmp/go/dir dir", true},
+ }
+ for _, test := range tests {
+ output, ok := expandSrcDir(test.input, test.srcdir)
+ if ok != test.result {
+ t.Errorf("Expected %t while %q expands to %q with SRCDIR=%q; got %t", test.result, test.input, output, test.srcdir, ok)
+ }
+ if output != test.expected {
+ t.Errorf("Expected %q while %q expands with SRCDIR=%q; got %q", test.expected, test.input, test.srcdir, output)
+ }
+ }
+}
+
+// Want to get a "cannot find package" error when the directory for the package does not exist.
+// There should be valid partial information in the returned non-nil *Package.
+func TestImportDirNotExist(t *testing.T) {
+ testenv.MustHaveGoBuild(t) // really must just have source
+ ctxt := Default
+
+ emptyDir, err := os.MkdirTemp("", t.Name())
+ if err != nil {
+ t.Fatal(err)
+ }
+ defer os.RemoveAll(emptyDir)
+
+ ctxt.GOPATH = emptyDir
+ ctxt.Dir = emptyDir
+
+ tests := []struct {
+ label string
+ path, srcDir string
+ mode ImportMode
+ }{
+ {"Import(full, 0)", "go/build/doesnotexist", "", 0},
+ {"Import(local, 0)", "./doesnotexist", filepath.Join(ctxt.GOROOT, "src/go/build"), 0},
+ {"Import(full, FindOnly)", "go/build/doesnotexist", "", FindOnly},
+ {"Import(local, FindOnly)", "./doesnotexist", filepath.Join(ctxt.GOROOT, "src/go/build"), FindOnly},
+ }
+
+ defer os.Setenv("GO111MODULE", os.Getenv("GO111MODULE"))
+
+ for _, GO111MODULE := range []string{"off", "on"} {
+ t.Run("GO111MODULE="+GO111MODULE, func(t *testing.T) {
+ os.Setenv("GO111MODULE", GO111MODULE)
+
+ for _, test := range tests {
+ p, err := ctxt.Import(test.path, test.srcDir, test.mode)
+
+ errOk := (err != nil && strings.HasPrefix(err.Error(), "cannot find package"))
+ wantErr := `"cannot find package" error`
+ if test.srcDir == "" {
+ if err != nil && strings.Contains(err.Error(), "is not in GOROOT") {
+ errOk = true
+ }
+ wantErr = `"cannot find package" or "is not in GOROOT" error`
+ }
+ if !errOk {
+ t.Errorf("%s got error: %q, want %s", test.label, err, wantErr)
+ }
+ // If an error occurs, build.Import is documented to return
+ // a non-nil *Package containing partial information.
+ if p == nil {
+ t.Fatalf(`%s got nil p, want non-nil *Package`, test.label)
+ }
+ // Verify partial information in p.
+ if p.ImportPath != "go/build/doesnotexist" {
+ t.Errorf(`%s got p.ImportPath: %q, want "go/build/doesnotexist"`, test.label, p.ImportPath)
+ }
+ }
+ })
+ }
+}
+
+func TestImportVendor(t *testing.T) {
+ testenv.MustHaveGoBuild(t) // really must just have source
+
+ defer os.Setenv("GO111MODULE", os.Getenv("GO111MODULE"))
+ os.Setenv("GO111MODULE", "off")
+
+ ctxt := Default
+ wd, err := os.Getwd()
+ if err != nil {
+ t.Fatal(err)
+ }
+ ctxt.GOPATH = filepath.Join(wd, "testdata/withvendor")
+ p, err := ctxt.Import("c/d", filepath.Join(ctxt.GOPATH, "src/a/b"), 0)
+ if err != nil {
+ t.Fatalf("cannot find vendored c/d from testdata src/a/b directory: %v", err)
+ }
+ want := "a/vendor/c/d"
+ if p.ImportPath != want {
+ t.Fatalf("Import succeeded but found %q, want %q", p.ImportPath, want)
+ }
+}
+
+func TestImportVendorFailure(t *testing.T) {
+ testenv.MustHaveGoBuild(t) // really must just have source
+
+ defer os.Setenv("GO111MODULE", os.Getenv("GO111MODULE"))
+ os.Setenv("GO111MODULE", "off")
+
+ ctxt := Default
+ wd, err := os.Getwd()
+ if err != nil {
+ t.Fatal(err)
+ }
+ ctxt.GOPATH = filepath.Join(wd, "testdata/withvendor")
+ p, err := ctxt.Import("x.com/y/z", filepath.Join(ctxt.GOPATH, "src/a/b"), 0)
+ if err == nil {
+ t.Fatalf("found made-up package x.com/y/z in %s", p.Dir)
+ }
+
+ e := err.Error()
+ if !strings.Contains(e, " (vendor tree)") {
+ t.Fatalf("error on failed import does not mention GOROOT/src/vendor directory:\n%s", e)
+ }
+}
+
+func TestImportVendorParentFailure(t *testing.T) {
+ testenv.MustHaveGoBuild(t) // really must just have source
+
+ defer os.Setenv("GO111MODULE", os.Getenv("GO111MODULE"))
+ os.Setenv("GO111MODULE", "off")
+
+ ctxt := Default
+ wd, err := os.Getwd()
+ if err != nil {
+ t.Fatal(err)
+ }
+ ctxt.GOPATH = filepath.Join(wd, "testdata/withvendor")
+ // This import should fail because the vendor/c directory has no source code.
+ p, err := ctxt.Import("c", filepath.Join(ctxt.GOPATH, "src/a/b"), 0)
+ if err == nil {
+ t.Fatalf("found empty parent in %s", p.Dir)
+ }
+ if p != nil && p.Dir != "" {
+ t.Fatalf("decided to use %s", p.Dir)
+ }
+ e := err.Error()
+ if !strings.Contains(e, " (vendor tree)") {
+ t.Fatalf("error on failed import does not mention GOROOT/src/vendor directory:\n%s", e)
+ }
+}
+
+// Check that a package is loaded in module mode if GO111MODULE=on, even when
+// no go.mod file is present. It should fail to resolve packages outside std.
+// Verifies golang.org/issue/34669.
+func TestImportPackageOutsideModule(t *testing.T) {
+ testenv.MustHaveGoBuild(t)
+
+ // Disable module fetching for this test so that 'go list' fails quickly
+ // without trying to find the latest version of a module.
+ defer os.Setenv("GOPROXY", os.Getenv("GOPROXY"))
+ os.Setenv("GOPROXY", "off")
+
+ // Create a GOPATH in a temporary directory. We don't use testdata
+ // because it's in GOROOT, which interferes with the module heuristic.
+ gopath, err := os.MkdirTemp("", "gobuild-notmodule")
+ if err != nil {
+ t.Fatal(err)
+ }
+ defer os.RemoveAll(gopath)
+ if err := os.MkdirAll(filepath.Join(gopath, "src/example.com/p"), 0777); err != nil {
+ t.Fatal(err)
+ }
+ if err := os.WriteFile(filepath.Join(gopath, "src/example.com/p/p.go"), []byte("package p"), 0666); err != nil {
+ t.Fatal(err)
+ }
+
+ defer os.Setenv("GO111MODULE", os.Getenv("GO111MODULE"))
+ os.Setenv("GO111MODULE", "on")
+ defer os.Setenv("GOPATH", os.Getenv("GOPATH"))
+ os.Setenv("GOPATH", gopath)
+ ctxt := Default
+ ctxt.GOPATH = gopath
+ ctxt.Dir = filepath.Join(gopath, "src/example.com/p")
+
+ want := "go.mod file not found in current directory or any parent directory"
+ if _, err := ctxt.Import("example.com/p", gopath, FindOnly); err == nil {
+ t.Fatal("importing package when no go.mod is present succeeded unexpectedly")
+ } else if errStr := err.Error(); !strings.Contains(errStr, want) {
+ t.Fatalf("error when importing package when no go.mod is present: got %q; want %q", errStr, want)
+ } else {
+ t.Logf(`ctxt.Import("example.com/p", _, FindOnly): %v`, err)
+ }
+}
+
+func TestImportDirTarget(t *testing.T) {
+ testenv.MustHaveGoBuild(t) // really must just have source
+ ctxt := Default
+ ctxt.GOPATH = ""
+ p, err := ctxt.ImportDir(filepath.Join(ctxt.GOROOT, "src/path"), 0)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if p.PkgTargetRoot == "" || p.PkgObj == "" {
+ t.Errorf("p.PkgTargetRoot == %q, p.PkgObj == %q, want non-empty", p.PkgTargetRoot, p.PkgObj)
+ }
+}
+
+// TestIssue23594 prevents go/build from regressing and populating Package.Doc
+// from comments in test files.
+func TestIssue23594(t *testing.T) {
+ // Package testdata/doc contains regular and external test files
+ // with comments attached to their package declarations. The names of the files
+ // ensure that we see the comments from the test files first.
+ p, err := ImportDir("testdata/doc", 0)
+ if err != nil {
+ t.Fatalf("could not import testdata: %v", err)
+ }
+
+ if p.Doc != "Correct" {
+ t.Fatalf("incorrectly set .Doc to %q", p.Doc)
+ }
+}
+
+// TestMissingImportErrorRepetition checks that when an unknown package is
+// imported, the package path is only shown once in the error.
+// Verifies golang.org/issue/34752.
+func TestMissingImportErrorRepetition(t *testing.T) {
+ testenv.MustHaveGoBuild(t) // need 'go list' internally
+ tmp, err := os.MkdirTemp("", "")
+ if err != nil {
+ t.Fatal(err)
+ }
+ defer os.RemoveAll(tmp)
+ if err := os.WriteFile(filepath.Join(tmp, "go.mod"), []byte("module m"), 0666); err != nil {
+ t.Fatal(err)
+ }
+ defer os.Setenv("GO111MODULE", os.Getenv("GO111MODULE"))
+ os.Setenv("GO111MODULE", "on")
+ defer os.Setenv("GOPROXY", os.Getenv("GOPROXY"))
+ os.Setenv("GOPROXY", "off")
+ defer os.Setenv("GONOPROXY", os.Getenv("GONOPROXY"))
+ os.Setenv("GONOPROXY", "none")
+
+ ctxt := Default
+ ctxt.Dir = tmp
+
+ pkgPath := "example.com/hello"
+ _, err = ctxt.Import(pkgPath, tmp, FindOnly)
+ if err == nil {
+ t.Fatal("unexpected success")
+ }
+
+ // Don't count the package path with a URL like https://...?go-get=1.
+ // See golang.org/issue/35986.
+ errStr := strings.ReplaceAll(err.Error(), "://"+pkgPath+"?go-get=1", "://...?go-get=1")
+
+ // Also don't count instances in suggested "go get" or similar commands
+ // (see https://golang.org/issue/41576). The suggested command typically
+ // follows a semicolon.
+ errStr = strings.SplitN(errStr, ";", 2)[0]
+
+ if n := strings.Count(errStr, pkgPath); n != 1 {
+ t.Fatalf("package path %q appears in error %d times; should appear once\nerror: %v", pkgPath, n, err)
+ }
+}
+
+// TestCgoImportsIgnored checks that imports in cgo files are not included
+// in the imports list when cgo is disabled.
+// Verifies golang.org/issue/35946.
+func TestCgoImportsIgnored(t *testing.T) {
+ ctxt := Default
+ ctxt.CgoEnabled = false
+ p, err := ctxt.ImportDir("testdata/cgo_disabled", 0)
+ if err != nil {
+ t.Fatal(err)
+ }
+ for _, path := range p.Imports {
+ if path == "should/be/ignored" {
+ t.Errorf("found import %q in ignored cgo file", path)
+ }
+ }
+}
diff --git a/src/go/build/constraint/expr.go b/src/go/build/constraint/expr.go
new file mode 100644
index 0000000..3b27870
--- /dev/null
+++ b/src/go/build/constraint/expr.go
@@ -0,0 +1,574 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package constraint implements parsing and evaluation of build constraint lines.
+// See https://golang.org/cmd/go/#hdr-Build_constraints for documentation about build constraints themselves.
+//
+// This package parses both the original “// +build” syntax and the “//go:build” syntax that will be added in Go 1.17.
+// The parser is being included in Go 1.16 to allow tools that need to process Go 1.17 source code
+// to still be built against the Go 1.16 release.
+// See https://golang.org/design/draft-gobuild for details about the “//go:build” syntax.
+package constraint
+
+import (
+ "errors"
+ "strings"
+ "unicode"
+ "unicode/utf8"
+)
+
+// An Expr is a build tag constraint expression.
+// The underlying concrete type is *AndExpr, *OrExpr, *NotExpr, or *TagExpr.
+type Expr interface {
+ // String returns the string form of the expression,
+ // using the boolean syntax used in //go:build lines.
+ String() string
+
+ // Eval reports whether the expression evaluates to true.
+ // It calls ok(tag) as needed to find out whether a given build tag
+ // is satisfied by the current build configuration.
+ Eval(ok func(tag string) bool) bool
+
+ // The presence of an isExpr method explicitly marks the type as an Expr.
+ // Only implementations in this package should be used as Exprs.
+ isExpr()
+}
+
+// A TagExpr is an Expr for the single tag Tag.
+type TagExpr struct {
+ Tag string // for example, “linux” or “cgo”
+}
+
+func (x *TagExpr) isExpr() {}
+
+func (x *TagExpr) Eval(ok func(tag string) bool) bool {
+ return ok(x.Tag)
+}
+
+func (x *TagExpr) String() string {
+ return x.Tag
+}
+
+func tag(tag string) Expr { return &TagExpr{tag} }
+
+// A NotExpr represents the expression !X (the negation of X).
+type NotExpr struct {
+ X Expr
+}
+
+func (x *NotExpr) isExpr() {}
+
+func (x *NotExpr) Eval(ok func(tag string) bool) bool {
+ return !x.X.Eval(ok)
+}
+
+func (x *NotExpr) String() string {
+ s := x.X.String()
+ switch x.X.(type) {
+ case *AndExpr, *OrExpr:
+ s = "(" + s + ")"
+ }
+ return "!" + s
+}
+
+func not(x Expr) Expr { return &NotExpr{x} }
+
+// An AndExpr represents the expression X && Y.
+type AndExpr struct {
+ X, Y Expr
+}
+
+func (x *AndExpr) isExpr() {}
+
+func (x *AndExpr) Eval(ok func(tag string) bool) bool {
+ // Note: Eval both, to make sure ok func observes all tags.
+ xok := x.X.Eval(ok)
+ yok := x.Y.Eval(ok)
+ return xok && yok
+}
+
+func (x *AndExpr) String() string {
+ return andArg(x.X) + " && " + andArg(x.Y)
+}
+
+func andArg(x Expr) string {
+ s := x.String()
+ if _, ok := x.(*OrExpr); ok {
+ s = "(" + s + ")"
+ }
+ return s
+}
+
+func and(x, y Expr) Expr {
+ return &AndExpr{x, y}
+}
+
+// An OrExpr represents the expression X || Y.
+type OrExpr struct {
+ X, Y Expr
+}
+
+func (x *OrExpr) isExpr() {}
+
+func (x *OrExpr) Eval(ok func(tag string) bool) bool {
+ // Note: Eval both, to make sure ok func observes all tags.
+ xok := x.X.Eval(ok)
+ yok := x.Y.Eval(ok)
+ return xok || yok
+}
+
+func (x *OrExpr) String() string {
+ return orArg(x.X) + " || " + orArg(x.Y)
+}
+
+func orArg(x Expr) string {
+ s := x.String()
+ if _, ok := x.(*AndExpr); ok {
+ s = "(" + s + ")"
+ }
+ return s
+}
+
+func or(x, y Expr) Expr {
+ return &OrExpr{x, y}
+}
+
+// A SyntaxError reports a syntax error in a parsed build expression.
+type SyntaxError struct {
+ Offset int // byte offset in input where error was detected
+ Err string // description of error
+}
+
+func (e *SyntaxError) Error() string {
+ return e.Err
+}
+
+var errNotConstraint = errors.New("not a build constraint")
+
+// Parse parses a single build constraint line of the form “//go:build ...” or “// +build ...”
+// and returns the corresponding boolean expression.
+func Parse(line string) (Expr, error) {
+ if text, ok := splitGoBuild(line); ok {
+ return parseExpr(text)
+ }
+ if text, ok := splitPlusBuild(line); ok {
+ return parsePlusBuildExpr(text), nil
+ }
+ return nil, errNotConstraint
+}
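+
+// A minimal usage sketch (the tag set is assumed, not taken from a real build):
+//
+// x, err := constraint.Parse("//go:build linux && !cgo")
+// ok := x.Eval(func(tag string) bool { return tag == "linux" })
+//
+// Here err is nil and ok is true: "linux" is satisfied and "cgo" is not.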
+
+// IsGoBuild reports whether the line of text is a “//go:build” constraint.
+// It only checks the prefix of the text, not that the expression itself parses.
+func IsGoBuild(line string) bool {
+ _, ok := splitGoBuild(line)
+ return ok
+}
+
+// splitGoBuild splits apart the leading //go:build prefix in line from the build expression itself.
+// It returns "", false if the input is not a //go:build line or if the input contains multiple lines.
+func splitGoBuild(line string) (expr string, ok bool) {
+ // A single trailing newline is OK; otherwise multiple lines are not.
+ if len(line) > 0 && line[len(line)-1] == '\n' {
+ line = line[:len(line)-1]
+ }
+ if strings.Contains(line, "\n") {
+ return "", false
+ }
+
+ if !strings.HasPrefix(line, "//go:build") {
+ return "", false
+ }
+
+ line = strings.TrimSpace(line)
+ line = line[len("//go:build"):]
+
+ // If strings.TrimSpace finds more to trim after removing the //go:build prefix,
+ // it means that the prefix was followed by a space, making this a //go:build line
+ // (as opposed to a //go:buildsomethingelse line).
+ // If line is empty, we had "//go:build" by itself, which also counts.
+ trim := strings.TrimSpace(line)
+ if len(line) == len(trim) && line != "" {
+ return "", false
+ }
+
+ return trim, true
+}
+
+// An exprParser holds state for parsing a build expression.
+type exprParser struct {
+ s string // input string
+ i int // next read location in s
+
+ tok string // last token read
+ isTag bool
+ pos int // position (start) of last token
+}
+
+// parseExpr parses a boolean build tag expression.
+func parseExpr(text string) (x Expr, err error) {
+ defer func() {
+ if e := recover(); e != nil {
+ if e, ok := e.(*SyntaxError); ok {
+ err = e
+ return
+ }
+ panic(e) // unreachable unless parser has a bug
+ }
+ }()
+
+ p := &exprParser{s: text}
+ x = p.or()
+ if p.tok != "" {
+ panic(&SyntaxError{Offset: p.pos, Err: "unexpected token " + p.tok})
+ }
+ return x, nil
+}
+
+// or parses a sequence of || expressions.
+// On entry, the next input token has not yet been lexed.
+// On exit, the next input token has been lexed and is in p.tok.
+func (p *exprParser) or() Expr {
+ x := p.and()
+ for p.tok == "||" {
+ x = or(x, p.and())
+ }
+ return x
+}
+
+// and parses a sequence of && expressions.
+// On entry, the next input token has not yet been lexed.
+// On exit, the next input token has been lexed and is in p.tok.
+func (p *exprParser) and() Expr {
+ x := p.not()
+ for p.tok == "&&" {
+ x = and(x, p.not())
+ }
+ return x
+}
+
+// not parses a ! expression.
+// On entry, the next input token has not yet been lexed.
+// On exit, the next input token has been lexed and is in p.tok.
+func (p *exprParser) not() Expr {
+ p.lex()
+ if p.tok == "!" {
+ p.lex()
+ if p.tok == "!" {
+ panic(&SyntaxError{Offset: p.pos, Err: "double negation not allowed"})
+ }
+ return not(p.atom())
+ }
+ return p.atom()
+}
+
+// atom parses a tag or a parenthesized expression.
+// On entry, the next input token HAS been lexed.
+// On exit, the next input token has been lexed and is in p.tok.
+func (p *exprParser) atom() Expr {
+ // first token already in p.tok
+ if p.tok == "(" {
+ pos := p.pos
+ defer func() {
+ if e := recover(); e != nil {
+ if e, ok := e.(*SyntaxError); ok && e.Err == "unexpected end of expression" {
+ e.Err = "missing close paren"
+ }
+ panic(e)
+ }
+ }()
+ x := p.or()
+ if p.tok != ")" {
+ panic(&SyntaxError{Offset: pos, Err: "missing close paren"})
+ }
+ p.lex()
+ return x
+ }
+
+ if !p.isTag {
+ if p.tok == "" {
+ panic(&SyntaxError{Offset: p.pos, Err: "unexpected end of expression"})
+ }
+ panic(&SyntaxError{Offset: p.pos, Err: "unexpected token " + p.tok})
+ }
+ tok := p.tok
+ p.lex()
+ return tag(tok)
+}
+
+// lex finds and consumes the next token in the input stream.
+// On return, p.tok is set to the token text,
+// p.isTag reports whether the token was a tag,
+// and p.pos records the byte offset of the start of the token in the input stream.
+// If lex reaches the end of the input, p.tok is set to the empty string.
+// For any other syntax error, lex panics with a SyntaxError.
+func (p *exprParser) lex() {
+ p.isTag = false
+ for p.i < len(p.s) && (p.s[p.i] == ' ' || p.s[p.i] == '\t') {
+ p.i++
+ }
+ if p.i >= len(p.s) {
+ p.tok = ""
+ p.pos = p.i
+ return
+ }
+ switch p.s[p.i] {
+ case '(', ')', '!':
+ p.pos = p.i
+ p.i++
+ p.tok = p.s[p.pos:p.i]
+ return
+
+ case '&', '|':
+ if p.i+1 >= len(p.s) || p.s[p.i+1] != p.s[p.i] {
+ panic(&SyntaxError{Offset: p.i, Err: "invalid syntax at " + string(rune(p.s[p.i]))})
+ }
+ p.pos = p.i
+ p.i += 2
+ p.tok = p.s[p.pos:p.i]
+ return
+ }
+
+ tag := p.s[p.i:]
+ for i, c := range tag {
+ if !unicode.IsLetter(c) && !unicode.IsDigit(c) && c != '_' && c != '.' {
+ tag = tag[:i]
+ break
+ }
+ }
+ if tag == "" {
+ c, _ := utf8.DecodeRuneInString(p.s[p.i:])
+ panic(&SyntaxError{Offset: p.i, Err: "invalid syntax at " + string(c)})
+ }
+
+ p.pos = p.i
+ p.i += len(tag)
+ p.tok = p.s[p.pos:p.i]
+ p.isTag = true
+ return
+}
+
+// IsPlusBuild reports whether the line of text is a “// +build” constraint.
+// It only checks the prefix of the text, not that the expression itself parses.
+func IsPlusBuild(line string) bool {
+ _, ok := splitPlusBuild(line)
+ return ok
+}
+
+// splitPlusBuild splits apart the leading // +build prefix in line from the build expression itself.
+// It returns "", false if the input is not a // +build line or if the input contains multiple lines.
+func splitPlusBuild(line string) (expr string, ok bool) {
+ // A single trailing newline is OK; otherwise multiple lines are not.
+ if len(line) > 0 && line[len(line)-1] == '\n' {
+ line = line[:len(line)-1]
+ }
+ if strings.Contains(line, "\n") {
+ return "", false
+ }
+
+ if !strings.HasPrefix(line, "//") {
+ return "", false
+ }
+ line = line[len("//"):]
+ // Note the space is optional; "//+build" is recognized too.
+ line = strings.TrimSpace(line)
+
+ if !strings.HasPrefix(line, "+build") {
+ return "", false
+ }
+ line = line[len("+build"):]
+
+ // If strings.TrimSpace finds more to trim after removing the +build prefix,
+ // it means that the prefix was followed by a space, making this a +build line
+ // (as opposed to a +buildsomethingelse line).
+ // If line is empty, we had "// +build" by itself, which also counts.
+ trim := strings.TrimSpace(line)
+ if len(line) == len(trim) && line != "" {
+ return "", false
+ }
+
+ return trim, true
+}
+
+// parsePlusBuildExpr parses a legacy build tag expression (as used with “// +build”).
+func parsePlusBuildExpr(text string) Expr {
+ var x Expr
+ for _, clause := range strings.Fields(text) {
+ var y Expr
+ for _, lit := range strings.Split(clause, ",") {
+ var z Expr
+ var neg bool
+ if strings.HasPrefix(lit, "!!") || lit == "!" {
+ z = tag("ignore")
+ } else {
+ if strings.HasPrefix(lit, "!") {
+ neg = true
+ lit = lit[len("!"):]
+ }
+ if isValidTag(lit) {
+ z = tag(lit)
+ } else {
+ z = tag("ignore")
+ }
+ if neg {
+ z = not(z)
+ }
+ }
+ if y == nil {
+ y = z
+ } else {
+ y = and(y, z)
+ }
+ }
+ if x == nil {
+ x = y
+ } else {
+ x = or(x, y)
+ }
+ }
+ return x
+}
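+
+// For example (worked by hand against the rules above), the legacy line
+// "// +build linux,386 darwin" carries the text "linux,386 darwin", and
+// parsePlusBuildExpr("linux,386 darwin").String() returns "(linux && 386) || darwin".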
+
+// isValidTag reports whether the word is a valid build tag.
+// Tags must be letters, digits, underscores or dots.
+// Unlike in Go identifiers, all digits are fine (e.g., "386").
+func isValidTag(word string) bool {
+ if word == "" {
+ return false
+ }
+ for _, c := range word {
+ if !unicode.IsLetter(c) && !unicode.IsDigit(c) && c != '_' && c != '.' {
+ return false
+ }
+ }
+ return true
+}
+
+var errComplex = errors.New("expression too complex for // +build lines")
+
+// PlusBuildLines returns a sequence of “// +build” lines that evaluate to the build expression x.
+// If the expression is too complex to convert directly to “// +build” lines, PlusBuildLines returns an error.
+func PlusBuildLines(x Expr) ([]string, error) {
+ // Push all NOTs to the expression leaves, so that //go:build !(x && y) can be treated as !x || !y.
+ // This rewrite is both efficient and commonly needed, so it's worth doing.
+ // Essentially all other possible rewrites are too expensive and too rarely needed.
+ x = pushNot(x, false)
+
+ // Split into AND of ORs of ANDs of literals (tag or NOT tag).
+ var split [][][]Expr
+ for _, or := range appendSplitAnd(nil, x) {
+ var ands [][]Expr
+ for _, and := range appendSplitOr(nil, or) {
+ var lits []Expr
+ for _, lit := range appendSplitAnd(nil, and) {
+ switch lit.(type) {
+ case *TagExpr, *NotExpr:
+ lits = append(lits, lit)
+ default:
+ return nil, errComplex
+ }
+ }
+ ands = append(ands, lits)
+ }
+ split = append(split, ands)
+ }
+
+ // If all the ORs have length 1 (no actual OR'ing going on),
+ // push the top-level ANDs to the bottom level, so that we get
+ // one // +build line instead of many.
+ maxOr := 0
+ for _, or := range split {
+ if maxOr < len(or) {
+ maxOr = len(or)
+ }
+ }
+ if maxOr == 1 {
+ var lits []Expr
+ for _, or := range split {
+ lits = append(lits, or[0]...)
+ }
+ split = [][][]Expr{{lits}}
+ }
+
+ // Prepare the +build lines.
+ var lines []string
+ for _, or := range split {
+ line := "// +build"
+ for _, and := range or {
+ clause := ""
+ for i, lit := range and {
+ if i > 0 {
+ clause += ","
+ }
+ clause += lit.String()
+ }
+ line += " " + clause
+ }
+ lines = append(lines, line)
+ }
+
+ return lines, nil
+}
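+
+// A round-trip sketch (expression chosen for illustration): for the //go:build
+// expression x && (y || z), PlusBuildLines returns the two lines
+//
+// // +build x
+// // +build y z
+//
+// which together express the same constraint.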
+
+// pushNot applies DeMorgan's law to push negations down the expression,
+// so that only tags are negated in the result.
+// (It applies the rewrites !(X && Y) => (!X || !Y) and !(X || Y) => (!X && !Y).)
+func pushNot(x Expr, not bool) Expr {
+ switch x := x.(type) {
+ default:
+ // unreachable
+ return x
+ case *NotExpr:
+ if _, ok := x.X.(*TagExpr); ok && !not {
+ return x
+ }
+ return pushNot(x.X, !not)
+ case *TagExpr:
+ if not {
+ return &NotExpr{X: x}
+ }
+ return x
+ case *AndExpr:
+ x1 := pushNot(x.X, not)
+ y1 := pushNot(x.Y, not)
+ if not {
+ return or(x1, y1)
+ }
+ if x1 == x.X && y1 == x.Y {
+ return x
+ }
+ return and(x1, y1)
+ case *OrExpr:
+ x1 := pushNot(x.X, not)
+ y1 := pushNot(x.Y, not)
+ if not {
+ return and(x1, y1)
+ }
+ if x1 == x.X && y1 == x.Y {
+ return x
+ }
+ return or(x1, y1)
+ }
+}
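+
+// For instance (worked by hand), pushNot turns !(x && y) into !x || !y and
+// !(x || y) into !x && !y, leaving already-negated tags such as !z untouched.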
+
+// appendSplitAnd appends x to list while splitting apart any top-level && expressions.
+// For example, appendSplitAnd({W}, X && Y && Z) = {W, X, Y, Z}.
+func appendSplitAnd(list []Expr, x Expr) []Expr {
+ if x, ok := x.(*AndExpr); ok {
+ list = appendSplitAnd(list, x.X)
+ list = appendSplitAnd(list, x.Y)
+ return list
+ }
+ return append(list, x)
+}
+
+// appendSplitOr appends x to list while splitting apart any top-level || expressions.
+// For example, appendSplitOr({W}, X || Y || Z) = {W, X, Y, Z}.
+func appendSplitOr(list []Expr, x Expr) []Expr {
+ if x, ok := x.(*OrExpr); ok {
+ list = appendSplitOr(list, x.X)
+ list = appendSplitOr(list, x.Y)
+ return list
+ }
+ return append(list, x)
+}
diff --git a/src/go/build/constraint/expr_test.go b/src/go/build/constraint/expr_test.go
new file mode 100644
index 0000000..4979f8b
--- /dev/null
+++ b/src/go/build/constraint/expr_test.go
@@ -0,0 +1,317 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package constraint
+
+import (
+ "fmt"
+ "reflect"
+ "strings"
+ "testing"
+)
+
+var exprStringTests = []struct {
+ x Expr
+ out string
+}{
+ {
+ x: tag("abc"),
+ out: "abc",
+ },
+ {
+ x: not(tag("abc")),
+ out: "!abc",
+ },
+ {
+ x: not(and(tag("abc"), tag("def"))),
+ out: "!(abc && def)",
+ },
+ {
+ x: and(tag("abc"), or(tag("def"), tag("ghi"))),
+ out: "abc && (def || ghi)",
+ },
+ {
+ x: or(and(tag("abc"), tag("def")), tag("ghi")),
+ out: "(abc && def) || ghi",
+ },
+}
+
+func TestExprString(t *testing.T) {
+ for i, tt := range exprStringTests {
+ t.Run(fmt.Sprint(i), func(t *testing.T) {
+ s := tt.x.String()
+ if s != tt.out {
+ t.Errorf("String() mismatch:\nhave %s\nwant %s", s, tt.out)
+ }
+ })
+ }
+}
+
+var lexTests = []struct {
+ in string
+ out string
+}{
+ {"", ""},
+ {"x", "x"},
+ {"x.y", "x.y"},
+ {"x_y", "x_y"},
+ {"αx", "αx"},
+ {"αx²", "αx err: invalid syntax at ²"},
+ {"go1.2", "go1.2"},
+ {"x y", "x y"},
+ {"x!y", "x ! y"},
+ {"&&||!()xy yx ", "&& || ! ( ) xy yx"},
+ {"x~", "x err: invalid syntax at ~"},
+ {"x ~", "x err: invalid syntax at ~"},
+ {"x &", "x err: invalid syntax at &"},
+ {"x &y", "x err: invalid syntax at &"},
+}
+
+func TestLex(t *testing.T) {
+ for i, tt := range lexTests {
+ t.Run(fmt.Sprint(i), func(t *testing.T) {
+ p := &exprParser{s: tt.in}
+ out := ""
+ for {
+ tok, err := lexHelp(p)
+ if tok == "" && err == nil {
+ break
+ }
+ if out != "" {
+ out += " "
+ }
+ if err != nil {
+ out += "err: " + err.Error()
+ break
+ }
+ out += tok
+ }
+ if out != tt.out {
+ t.Errorf("lex(%q):\nhave %s\nwant %s", tt.in, out, tt.out)
+ }
+ })
+ }
+}
+
+func lexHelp(p *exprParser) (tok string, err error) {
+ defer func() {
+ if e := recover(); e != nil {
+ if e, ok := e.(*SyntaxError); ok {
+ err = e
+ return
+ }
+ panic(e)
+ }
+ }()
+
+ p.lex()
+ return p.tok, nil
+}
+
+var parseExprTests = []struct {
+ in string
+ x Expr
+}{
+ {"x", tag("x")},
+ {"x&&y", and(tag("x"), tag("y"))},
+ {"x||y", or(tag("x"), tag("y"))},
+ {"(x)", tag("x")},
+ {"x||y&&z", or(tag("x"), and(tag("y"), tag("z")))},
+ {"x&&y||z", or(and(tag("x"), tag("y")), tag("z"))},
+ {"x&&(y||z)", and(tag("x"), or(tag("y"), tag("z")))},
+ {"(x||y)&&z", and(or(tag("x"), tag("y")), tag("z"))},
+ {"!(x&&y)", not(and(tag("x"), tag("y")))},
+}
+
+func TestParseExpr(t *testing.T) {
+ for i, tt := range parseExprTests {
+ t.Run(fmt.Sprint(i), func(t *testing.T) {
+ x, err := parseExpr(tt.in)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if x.String() != tt.x.String() {
+ t.Errorf("parseExpr(%q):\nhave %s\nwant %s", tt.in, x, tt.x)
+ }
+ })
+ }
+}
+
+var parseExprErrorTests = []struct {
+ in string
+ err error
+}{
+ {"x && ", &SyntaxError{Offset: 5, Err: "unexpected end of expression"}},
+ {"x && (", &SyntaxError{Offset: 6, Err: "missing close paren"}},
+ {"x && ||", &SyntaxError{Offset: 5, Err: "unexpected token ||"}},
+ {"x && !", &SyntaxError{Offset: 6, Err: "unexpected end of expression"}},
+ {"x && !!", &SyntaxError{Offset: 6, Err: "double negation not allowed"}},
+ {"x !", &SyntaxError{Offset: 2, Err: "unexpected token !"}},
+ {"x && (y", &SyntaxError{Offset: 5, Err: "missing close paren"}},
+}
+
+func TestParseError(t *testing.T) {
+ for i, tt := range parseExprErrorTests {
+ t.Run(fmt.Sprint(i), func(t *testing.T) {
+ x, err := parseExpr(tt.in)
+ if err == nil {
+ t.Fatalf("parseExpr(%q) = %v, want error", tt.in, x)
+ }
+ if !reflect.DeepEqual(err, tt.err) {
+ t.Fatalf("parseExpr(%q): wrong error:\nhave %#v\nwant %#v", tt.in, err, tt.err)
+ }
+ })
+ }
+}
+
+var exprEvalTests = []struct {
+ in string
+ ok bool
+ tags string
+}{
+ {"x", false, "x"},
+ {"x && y", false, "x y"},
+ {"x || y", false, "x y"},
+ {"!x && yes", true, "x yes"},
+ {"yes || y", true, "y yes"},
+}
+
+func TestExprEval(t *testing.T) {
+ for i, tt := range exprEvalTests {
+ t.Run(fmt.Sprint(i), func(t *testing.T) {
+ x, err := parseExpr(tt.in)
+ if err != nil {
+ t.Fatal(err)
+ }
+ tags := make(map[string]bool)
+ wantTags := make(map[string]bool)
+ for _, tag := range strings.Fields(tt.tags) {
+ wantTags[tag] = true
+ }
+ hasTag := func(tag string) bool {
+ tags[tag] = true
+ return tag == "yes"
+ }
+ ok := x.Eval(hasTag)
+ if ok != tt.ok || !reflect.DeepEqual(tags, wantTags) {
+ t.Errorf("Eval(%#q):\nhave ok=%v, tags=%v\nwant ok=%v, tags=%v",
+ tt.in, ok, tags, tt.ok, wantTags)
+ }
+ })
+ }
+}
+
+var parsePlusBuildExprTests = []struct {
+ in string
+ x Expr
+}{
+ {"x", tag("x")},
+ {"x,y", and(tag("x"), tag("y"))},
+ {"x y", or(tag("x"), tag("y"))},
+ {"x y,z", or(tag("x"), and(tag("y"), tag("z")))},
+ {"x,y z", or(and(tag("x"), tag("y")), tag("z"))},
+ {"x,!y !z", or(and(tag("x"), not(tag("y"))), not(tag("z")))},
+ {"!! x", or(tag("ignore"), tag("x"))},
+ {"!!x", tag("ignore")},
+ {"!x", not(tag("x"))},
+ {"!", tag("ignore")},
+}
+
+func TestParsePlusBuildExpr(t *testing.T) {
+ for i, tt := range parsePlusBuildExprTests {
+ t.Run(fmt.Sprint(i), func(t *testing.T) {
+ x := parsePlusBuildExpr(tt.in)
+ if x.String() != tt.x.String() {
+ t.Errorf("parsePlusBuildExpr(%q):\nhave %v\nwant %v", tt.in, x, tt.x)
+ }
+ })
+ }
+}
+
+var constraintTests = []struct {
+ in string
+ x Expr
+ err error
+}{
+ {"//+build x y", or(tag("x"), tag("y")), nil},
+ {"// +build x y \n", or(tag("x"), tag("y")), nil},
+ {"// +build x y \n ", nil, errNotConstraint},
+ {"// +build x y \nmore", nil, errNotConstraint},
+ {" //+build x y", nil, errNotConstraint},
+
+ {"//go:build x && y", and(tag("x"), tag("y")), nil},
+ {"//go:build x && y\n", and(tag("x"), tag("y")), nil},
+ {"//go:build x && y\n ", nil, errNotConstraint},
+ {"//go:build x && y\nmore", nil, errNotConstraint},
+ {" //go:build x && y", nil, errNotConstraint},
+}
+
+func TestParse(t *testing.T) {
+ for i, tt := range constraintTests {
+ t.Run(fmt.Sprint(i), func(t *testing.T) {
+ x, err := Parse(tt.in)
+ if err != nil {
+ if tt.err == nil {
+ t.Errorf("Constraint(%q): unexpected error: %v", tt.in, err)
+ } else if tt.err != err {
+ t.Errorf("Constraint(%q): error %v, want %v", tt.in, err, tt.err)
+ }
+ return
+ }
+ if tt.err != nil {
+ t.Errorf("Constraint(%q) = %v, want error %v", tt.in, x, tt.err)
+ return
+ }
+ if x.String() != tt.x.String() {
+ t.Errorf("Constraint(%q):\nhave %v\nwant %v", tt.in, x, tt.x)
+ }
+ })
+ }
+}
+
+var plusBuildLinesTests = []struct {
+ in string
+ out []string
+ err error
+}{
+ {"x", []string{"x"}, nil},
+ {"x && !y", []string{"x,!y"}, nil},
+ {"x || y", []string{"x y"}, nil},
+ {"x && (y || z)", []string{"x", "y z"}, nil},
+ {"!(x && y)", []string{"!x !y"}, nil},
+ {"x || (y && z)", []string{"x y,z"}, nil},
+ {"w && (x || (y && z))", []string{"w", "x y,z"}, nil},
+ {"v || (w && (x || (y && z)))", nil, errComplex},
+}
+
+func TestPlusBuildLines(t *testing.T) {
+ for i, tt := range plusBuildLinesTests {
+ t.Run(fmt.Sprint(i), func(t *testing.T) {
+ x, err := parseExpr(tt.in)
+ if err != nil {
+ t.Fatal(err)
+ }
+ lines, err := PlusBuildLines(x)
+ if err != nil {
+ if tt.err == nil {
+ t.Errorf("PlusBuildLines(%q): unexpected error: %v", tt.in, err)
+ } else if tt.err != err {
+ t.Errorf("PlusBuildLines(%q): error %v, want %v", tt.in, err, tt.err)
+ }
+ return
+ }
+ if tt.err != nil {
+ t.Errorf("PlusBuildLines(%q) = %v, want error %v", tt.in, lines, tt.err)
+ return
+ }
+ var want []string
+ for _, line := range tt.out {
+ want = append(want, "// +build "+line)
+ }
+ if !reflect.DeepEqual(lines, want) {
+ t.Errorf("PlusBuildLines(%q):\nhave %q\nwant %q", tt.in, lines, want)
+ }
+ })
+ }
+}
diff --git a/src/go/build/deps_test.go b/src/go/build/deps_test.go
new file mode 100644
index 0000000..c97c668
--- /dev/null
+++ b/src/go/build/deps_test.go
@@ -0,0 +1,877 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file exercises the import parser but also checks that
+// some low-level packages do not have new dependencies added.
+
+package build
+
+import (
+ "bytes"
+ "fmt"
+ "go/token"
+ "internal/testenv"
+ "io/fs"
+ "os"
+ "path/filepath"
+ "runtime"
+ "sort"
+ "strings"
+ "testing"
+)
+
+// depsRules defines the expected dependencies between packages in
+// the Go source tree. It is a statement of policy.
+//
+// DO NOT CHANGE THIS DATA TO FIX BUILDS.
+// Existing packages should not have their constraints relaxed
+// without prior discussion.
+// Negative assertions should almost never be removed.
+//
+// The general syntax of a rule is:
+//
+// a, b < c, d;
+//
+// which means c and d come after a and b in the partial order
+// (that is, c and d can import a and b),
+// but doesn't provide a relative order between a vs b or c vs d.
+//
+// The rules can chain together, as in:
+//
+// e < f, g < h;
+//
+// which is equivalent to
+//
+// e < f, g;
+// f, g < h;
+//
+// Except for the special bottom element "NONE", each name
+// must appear exactly once on the right-hand side of a rule.
+// That rule serves as the definition of the allowed dependencies
+// for that name. The definition must appear before any uses
+// of the name on the left-hand side of a rule. (That is, the
+// rules themselves must be ordered according to the partial
+// order, for easier reading by people.)
+//
+// Negative assertions double-check the partial order:
+//
+// i !< j
+//
+// means that it must NOT be the case that i < j.
+// Negative assertions may appear anywhere in the rules,
+// even before i and j have been defined.
+//
+// Comments begin with #.
+//
+// All-caps names are pseudo-names for specific points
+// in the dependency lattice.
+//
+var depsRules = `
+ # No dependencies allowed for any of these packages.
+ NONE
+ < container/list, container/ring,
+ internal/cfg, internal/cpu,
+ internal/goversion, internal/nettrace,
+ unicode/utf8, unicode/utf16, unicode,
+ unsafe;
+
+ # RUNTIME is the core runtime group of packages, all of them very light-weight.
+ internal/cpu, unsafe
+ < internal/bytealg
+ < internal/unsafeheader
+ < runtime/internal/sys
+ < runtime/internal/atomic
+ < runtime/internal/math
+ < runtime
+ < sync/atomic
+ < internal/race
+ < sync
+ < internal/reflectlite
+ < errors
+ < internal/oserror, math/bits
+ < RUNTIME;
+
+ RUNTIME
+ < sort
+ < container/heap;
+
+ RUNTIME
+ < io;
+
+ syscall !< io;
+ reflect !< sort;
+
+ RUNTIME, unicode/utf8
+ < path;
+
+ unicode !< path;
+
+ # SYSCALL is RUNTIME plus the packages necessary for basic system calls.
+ RUNTIME, unicode/utf8, unicode/utf16
+ < internal/syscall/windows/sysdll, syscall/js
+ < syscall
+ < internal/syscall/unix, internal/syscall/windows, internal/syscall/windows/registry
+ < internal/syscall/execenv
+ < SYSCALL;
+
+ # TIME is SYSCALL plus the core packages about time, including context.
+ SYSCALL
+ < time/tzdata
+ < time
+ < context
+ < TIME;
+
+ TIME, io, path, sort
+ < io/fs;
+
+ # MATH is RUNTIME plus the basic math packages.
+ RUNTIME
+ < math
+ < MATH;
+
+ unicode !< math;
+
+ MATH
+ < math/cmplx;
+
+ MATH
+ < math/rand;
+
+ MATH
+ < runtime/metrics;
+
+ MATH, unicode/utf8
+ < strconv;
+
+ unicode !< strconv;
+
+ # STR is basic string and buffer manipulation.
+ RUNTIME, io, unicode/utf8, unicode/utf16, unicode
+ < bytes, strings
+ < bufio;
+
+ bufio, path, strconv
+ < STR;
+
+ # OS is basic OS access, including helpers (path/filepath, os/exec, etc).
+ # OS includes string routines, but those must be layered above package os.
+ # OS does not include reflection.
+ io/fs
+ < internal/testlog
+ < internal/poll
+ < os
+ < os/signal;
+
+ io/fs
+ < embed;
+
+ unicode, fmt !< os, os/signal;
+
+ os/signal, STR
+ < path/filepath
+ < io/ioutil, os/exec;
+
+ io/ioutil, os/exec, os/signal
+ < OS;
+
+ reflect !< OS;
+
+ OS
+ < golang.org/x/sys/cpu;
+
+ # FMT is OS (which includes string routines) plus reflect and fmt.
+ # It does not include package log, which should be avoided in core packages.
+ strconv, unicode
+ < reflect;
+
+ os, reflect
+ < internal/fmtsort
+ < fmt;
+
+ OS, fmt
+ < FMT;
+
+ log !< FMT;
+
+ OS, FMT
+ < internal/execabs;
+
+ OS, internal/execabs
+ < internal/goroot;
+
+ # Misc packages needing only FMT.
+ FMT
+ < flag,
+ html,
+ mime/quotedprintable,
+ net/internal/socktest,
+ net/url,
+ runtime/debug,
+ runtime/trace,
+ text/scanner,
+ text/tabwriter;
+
+ # encodings
+ # core ones do not use fmt.
+ io, strconv
+ < encoding;
+
+ encoding, reflect
+ < encoding/binary
+ < encoding/base32, encoding/base64;
+
+ fmt !< encoding/base32, encoding/base64;
+
+ FMT, encoding/base32, encoding/base64
+ < encoding/ascii85, encoding/csv, encoding/gob, encoding/hex,
+ encoding/json, encoding/pem, encoding/xml, mime;
+
+ # hashes
+ io
+ < hash
+ < hash/adler32, hash/crc32, hash/crc64, hash/fnv, hash/maphash;
+
+ # math/big
+ FMT, encoding/binary, math/rand
+ < math/big;
+
+ # compression
+ FMT, encoding/binary, hash/adler32, hash/crc32
+ < compress/bzip2, compress/flate, compress/lzw
+ < archive/zip, compress/gzip, compress/zlib;
+
+ # templates
+ FMT
+ < text/template/parse;
+
+ net/url, text/template/parse
+ < text/template
+ < internal/lazytemplate;
+
+ encoding/json, html, text/template
+ < html/template;
+
+ # regexp
+ FMT
+ < regexp/syntax
+ < regexp
+ < internal/lazyregexp;
+
+ # suffix array
+ encoding/binary, regexp
+ < index/suffixarray;
+
+ # executable parsing
+ FMT, encoding/binary, compress/zlib
+ < debug/dwarf
+ < debug/elf, debug/gosym, debug/macho, debug/pe, debug/plan9obj, internal/xcoff
+ < DEBUG;
+
+ # go parser and friends.
+ FMT
+ < go/token
+ < go/scanner
+ < go/ast
+ < go/parser;
+
+ go/parser, text/tabwriter
+ < go/printer
+ < go/format;
+
+ go/parser, internal/lazyregexp, text/template
+ < go/doc;
+
+ math/big, go/token
+ < go/constant;
+
+ container/heap, go/constant, go/parser
+ < go/types;
+
+ FMT
+ < go/build/constraint;
+
+ go/doc, go/parser, internal/goroot, internal/goversion
+ < go/build;
+
+ DEBUG, go/build, go/types, text/scanner
+ < go/internal/gcimporter, go/internal/gccgoimporter, go/internal/srcimporter
+ < go/importer;
+
+ # databases
+ FMT
+ < database/sql/internal
+ < database/sql/driver
+ < database/sql;
+
+ # images
+ FMT, compress/lzw, compress/zlib
+ < image/color
+ < image, image/color/palette
+ < image/internal/imageutil
+ < image/draw
+ < image/gif, image/jpeg, image/png;
+
+ # cgo, delayed as long as possible.
+ # If you add a dependency on CGO, you must add the package
+ # to cgoPackages in cmd/dist/test.go as well.
+ RUNTIME
+ < C
+ < runtime/cgo
+ < CGO
+ < runtime/race, runtime/msan;
+
+ # Bulk of the standard library must not use cgo.
+ # The prohibition stops at net and os/user.
+ C !< fmt, go/types, CRYPTO-MATH;
+
+ CGO, OS
+ < plugin;
+
+ CGO, FMT
+ < os/user
+ < archive/tar;
+
+ sync
+ < internal/singleflight;
+
+ os
+ < golang.org/x/net/dns/dnsmessage,
+ golang.org/x/net/lif,
+ golang.org/x/net/route;
+
+ # net is unavoidable when doing any networking,
+ # so large dependencies must be kept out.
+ # This is a long-looking list but most of these
+ # are small with few dependencies.
+ CGO,
+ golang.org/x/net/dns/dnsmessage,
+ golang.org/x/net/lif,
+ golang.org/x/net/route,
+ internal/nettrace,
+ internal/poll,
+ internal/singleflight,
+ internal/race,
+ os
+ < net;
+
+ fmt, unicode !< net;
+ math/rand !< net; # net uses runtime instead
+
+ # NET is net plus net-helper packages.
+ FMT, net
+ < net/textproto;
+
+ mime, net/textproto, net/url
+ < NET;
+
+ # logging - most packages should not import; http and up is allowed
+ FMT
+ < log;
+
+ log !< crypto/tls, database/sql, go/importer, testing;
+
+ FMT, log, net
+ < log/syslog;
+
+ NET, log
+ < net/mail;
+
+ # CRYPTO is core crypto algorithms - no cgo, fmt, net.
+ # Unfortunately, stuck with reflect via encoding/binary.
+ encoding/binary, golang.org/x/sys/cpu, hash
+ < crypto
+ < crypto/subtle
+ < crypto/internal/subtle
+ < crypto/cipher
+ < crypto/aes, crypto/des, crypto/hmac, crypto/md5, crypto/rc4,
+ crypto/sha1, crypto/sha256, crypto/sha512
+ < CRYPTO;
+
+ CGO, fmt, net !< CRYPTO;
+
+ # CRYPTO-MATH is core bignum-based crypto - no cgo, net; fmt now ok.
+ CRYPTO, FMT, math/big
+ < crypto/rand
+ < crypto/internal/randutil
+ < crypto/ed25519/internal/edwards25519
+ < crypto/ed25519
+ < encoding/asn1
+ < golang.org/x/crypto/cryptobyte/asn1
+ < golang.org/x/crypto/cryptobyte
+ < golang.org/x/crypto/curve25519
+ < crypto/dsa, crypto/elliptic, crypto/rsa
+ < crypto/ecdsa
+ < CRYPTO-MATH;
+
+ CGO, net !< CRYPTO-MATH;
+
+ # TLS, Prince of Dependencies.
+ CRYPTO-MATH, NET, container/list, encoding/hex, encoding/pem
+ < golang.org/x/crypto/internal/subtle
+ < golang.org/x/crypto/chacha20
+ < golang.org/x/crypto/poly1305
+ < golang.org/x/crypto/chacha20poly1305
+ < golang.org/x/crypto/hkdf
+ < crypto/x509/internal/macos
+ < crypto/x509/pkix
+ < crypto/x509
+ < crypto/tls;
+
+ # crypto-aware packages
+
+ NET, crypto/rand, mime/quotedprintable
+ < mime/multipart;
+
+ crypto/tls
+ < net/smtp;
+
+ # HTTP, King of Dependencies.
+
+ FMT
+ < golang.org/x/net/http2/hpack, net/http/internal;
+
+ FMT, NET, container/list, encoding/binary, log
+ < golang.org/x/text/transform
+ < golang.org/x/text/unicode/norm
+ < golang.org/x/text/unicode/bidi
+ < golang.org/x/text/secure/bidirule
+ < golang.org/x/net/idna
+ < golang.org/x/net/http/httpguts, golang.org/x/net/http/httpproxy;
+
+ NET, crypto/tls
+ < net/http/httptrace;
+
+ compress/gzip,
+ golang.org/x/net/http/httpguts,
+ golang.org/x/net/http/httpproxy,
+ golang.org/x/net/http2/hpack,
+ net/http/internal,
+ net/http/httptrace,
+ mime/multipart,
+ log
+ < net/http;
+
+ # HTTP-aware packages
+
+ encoding/json, net/http
+ < expvar;
+
+ net/http
+ < net/http/cookiejar, net/http/httputil;
+
+ net/http, flag
+ < net/http/httptest;
+
+ net/http, regexp
+ < net/http/cgi
+ < net/http/fcgi;
+
+ # Profiling
+ FMT, compress/gzip, encoding/binary, text/tabwriter
+ < runtime/pprof;
+
+ OS, compress/gzip, regexp
+ < internal/profile;
+
+ html, internal/profile, net/http, runtime/pprof, runtime/trace
+ < net/http/pprof;
+
+ # RPC
+ encoding/gob, encoding/json, go/token, html/template, net/http
+ < net/rpc
+ < net/rpc/jsonrpc;
+
+ # System Information
+ internal/cpu, sync
+ < internal/sysinfo;
+
+ # Test-only
+ log
+ < testing/iotest
+ < testing/fstest;
+
+ FMT, flag, math/rand
+ < testing/quick;
+
+ FMT, flag, runtime/debug, runtime/trace, internal/sysinfo
+ < testing;
+
+ internal/testlog, runtime/pprof, regexp
+ < testing/internal/testdeps;
+
+ OS, flag, testing, internal/cfg
+ < internal/testenv;
+
+ OS, encoding/base64
+ < internal/obscuretestdata;
+
+ CGO, OS, fmt
+ < os/signal/internal/pty;
+
+ NET, testing, math/rand
+ < golang.org/x/net/nettest;
+
+ FMT, container/heap, math/rand
+ < internal/trace;
+`
+
+// listStdPkgs returns the same list of packages as "go list std".
+func listStdPkgs(goroot string) ([]string, error) {
+ // Based on cmd/go's matchPackages function.
+ var pkgs []string
+
+ src := filepath.Join(goroot, "src") + string(filepath.Separator)
+ walkFn := func(path string, d fs.DirEntry, err error) error {
+ if err != nil || !d.IsDir() || path == src {
+ return nil
+ }
+
+ base := filepath.Base(path)
+ if strings.HasPrefix(base, ".") || strings.HasPrefix(base, "_") || base == "testdata" {
+ return filepath.SkipDir
+ }
+
+ name := filepath.ToSlash(path[len(src):])
+ if name == "builtin" || name == "cmd" {
+ return filepath.SkipDir
+ }
+
+ pkgs = append(pkgs, strings.TrimPrefix(name, "vendor/"))
+ return nil
+ }
+ if err := filepath.WalkDir(src, walkFn); err != nil {
+ return nil, err
+ }
+ return pkgs, nil
+}
+
+func TestDependencies(t *testing.T) {
+ if !testenv.HasSrc() {
+ // Tests run in a limited file system and we do not
+ // provide access to every source file.
+ t.Skipf("skipping on %s/%s, missing full GOROOT", runtime.GOOS, runtime.GOARCH)
+ }
+
+ ctxt := Default
+ all, err := listStdPkgs(ctxt.GOROOT)
+ if err != nil {
+ t.Fatal(err)
+ }
+ sort.Strings(all)
+
+ sawImport := map[string]map[string]bool{} // from package => to package => true
+ policy := depsPolicy(t)
+
+ for _, pkg := range all {
+ imports, err := findImports(pkg)
+ if err != nil {
+ t.Error(err)
+ continue
+ }
+ if sawImport[pkg] == nil {
+ sawImport[pkg] = map[string]bool{}
+ }
+ ok := policy[pkg]
+ var bad []string
+ for _, imp := range imports {
+ sawImport[pkg][imp] = true
+ if !ok[imp] {
+ bad = append(bad, imp)
+ }
+ }
+ if bad != nil {
+ t.Errorf("unexpected dependency: %s imports %v", pkg, bad)
+ }
+ }
+
+ // depPath returns the path between the given from and to packages.
+ // It returns the empty string if there's no dependency path.
+ var depPath func(string, string) string
+ depPath = func(from, to string) string {
+ if sawImport[from][to] {
+ return from + " => " + to
+ }
+ for pkg := range sawImport[from] {
+ if p := depPath(pkg, to); p != "" {
+ return from + " => " + p
+ }
+ }
+ return ""
+ }
+}
+
+var buildIgnore = []byte("\n// +build ignore")
+
+func findImports(pkg string) ([]string, error) {
+ vpkg := pkg
+ if strings.HasPrefix(pkg, "golang.org") {
+ vpkg = "vendor/" + pkg
+ }
+ dir := filepath.Join(Default.GOROOT, "src", vpkg)
+ files, err := os.ReadDir(dir)
+ if err != nil {
+ return nil, err
+ }
+ var imports []string
+ var haveImport = map[string]bool{}
+ fset := token.NewFileSet()
+ for _, file := range files {
+ name := file.Name()
+ if name == "slice_go14.go" || name == "slice_go18.go" {
+ // These files are for compiler bootstrap with older versions of Go and are not built in the standard build.
+ continue
+ }
+ if !strings.HasSuffix(name, ".go") || strings.HasSuffix(name, "_test.go") {
+ continue
+ }
+ info := fileInfo{
+ name: filepath.Join(dir, name),
+ fset: fset,
+ }
+ f, err := os.Open(info.name)
+ if err != nil {
+ return nil, err
+ }
+ err = readGoInfo(f, &info)
+ f.Close()
+ if err != nil {
+ return nil, fmt.Errorf("reading %v: %v", name, err)
+ }
+ if bytes.Contains(info.header, buildIgnore) {
+ continue
+ }
+ for _, imp := range info.imports {
+ path := imp.path
+ if !haveImport[path] {
+ haveImport[path] = true
+ imports = append(imports, path)
+ }
+ }
+ }
+ sort.Strings(imports)
+ return imports, nil
+}
+
+// depsPolicy returns a map m such that m[p][d] == true when p can import d.
+func depsPolicy(t *testing.T) map[string]map[string]bool {
+ allowed := map[string]map[string]bool{"NONE": {}}
+ disallowed := [][2][]string{}
+
+ parseDepsRules(t, func(deps []string, op string, users []string) {
+ if op == "!<" {
+ disallowed = append(disallowed, [2][]string{deps, users})
+ return
+ }
+ for _, u := range users {
+ if allowed[u] != nil {
+ t.Errorf("multiple deps lists for %s", u)
+ }
+ allowed[u] = make(map[string]bool)
+ for _, d := range deps {
+ if allowed[d] == nil {
+ t.Errorf("use of %s before its deps list", d)
+ }
+ allowed[u][d] = true
+ }
+ }
+ })
+
+ // Check for missing deps info.
+ for _, deps := range allowed {
+ for d := range deps {
+ if allowed[d] == nil {
+ t.Errorf("missing deps list for %s", d)
+ }
+ }
+ }
+
+ // Complete transitive allowed deps.
+ for k := range allowed {
+ for i := range allowed {
+ for j := range allowed {
+ if i != k && k != j && allowed[i][k] && allowed[k][j] {
+ if i == j {
+ // Can only happen along with a "use of X before deps" error above,
+ // but this error is more specific - it makes clear that reordering the
+ // rules will not be enough to fix the problem.
+ t.Errorf("deps policy cycle: %s < %s < %s", j, k, i)
+ }
+ allowed[i][j] = true
+ }
+ }
+ }
+ }
+
+ // Check negative assertions against completed allowed deps.
+ for _, bad := range disallowed {
+ deps, users := bad[0], bad[1]
+ for _, d := range deps {
+ for _, u := range users {
+ if allowed[u][d] {
+ t.Errorf("deps policy incorrect: assertion failed: %s !< %s", d, u)
+ }
+ }
+ }
+ }
+
+ if t.Failed() {
+ t.FailNow()
+ }
+
+ return allowed
+}
+
+// parseDepsRules parses depsRules, calling save(deps, op, users)
+// for each deps < users or deps !< users rule
+// (op is "<" or "!<").
+func parseDepsRules(t *testing.T, save func(deps []string, op string, users []string)) {
+ p := &depsParser{t: t, lineno: 1, text: depsRules}
+
+ var prev []string
+ var op string
+ for {
+ list, tok := p.nextList()
+ if tok == "" {
+ if prev == nil {
+ break
+ }
+ p.syntaxError("unexpected EOF")
+ }
+ if prev != nil {
+ save(prev, op, list)
+ }
+ prev = list
+ if tok == ";" {
+ prev = nil
+ op = ""
+ continue
+ }
+ if tok != "<" && tok != "!<" {
+ p.syntaxError("missing <")
+ }
+ op = tok
+ }
+}
+
+// A depsParser parses the depsRules syntax described above.
+type depsParser struct {
+ t *testing.T
+ lineno int
+ lastWord string
+ text string
+}
+
+// syntaxError reports a parsing error.
+func (p *depsParser) syntaxError(msg string) {
+ p.t.Fatalf("deps:%d: syntax error: %s near %s", p.lineno, msg, p.lastWord)
+}
+
+// nextList parses and returns a comma-separated list of names.
+func (p *depsParser) nextList() (list []string, token string) {
+ for {
+ tok := p.nextToken()
+ switch tok {
+ case "":
+ if len(list) == 0 {
+ return nil, ""
+ }
+ fallthrough
+ case ",", "<", "!<", ";":
+ p.syntaxError("bad list syntax")
+ }
+ list = append(list, tok)
+
+ tok = p.nextToken()
+ if tok != "," {
+ return list, tok
+ }
+ }
+}
+
+// nextToken returns the next token in the deps rules,
+// one of ";" "," "<" "!<" or a name.
+func (p *depsParser) nextToken() string {
+ for {
+ if p.text == "" {
+ return ""
+ }
+ switch p.text[0] {
+ case ';', ',', '<':
+ t := p.text[:1]
+ p.text = p.text[1:]
+ return t
+
+ case '!':
+ if len(p.text) < 2 || p.text[1] != '<' {
+ p.syntaxError("unexpected token !")
+ }
+ p.text = p.text[2:]
+ return "!<"
+
+ case '#':
+ i := strings.Index(p.text, "\n")
+ if i < 0 {
+ i = len(p.text)
+ }
+ p.text = p.text[i:]
+ continue
+
+ case '\n':
+ p.lineno++
+ fallthrough
+ case ' ', '\t':
+ p.text = p.text[1:]
+ continue
+
+ default:
+ i := strings.IndexAny(p.text, "!;,<#\n \t")
+ if i < 0 {
+ i = len(p.text)
+ }
+ t := p.text[:i]
+ p.text = p.text[i:]
+ p.lastWord = t
+ return t
+ }
+ }
+}
+
+// TestStdlibLowercase tests that all standard library package names are
+// lowercase. See Issue 40065.
+func TestStdlibLowercase(t *testing.T) {
+ if !testenv.HasSrc() {
+ t.Skipf("skipping on %s/%s, missing full GOROOT", runtime.GOOS, runtime.GOARCH)
+ }
+
+ ctxt := Default
+ all, err := listStdPkgs(ctxt.GOROOT)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ for _, pkgname := range all {
+ if strings.ToLower(pkgname) != pkgname {
+ t.Errorf("package %q should not use upper-case path", pkgname)
+ }
+ }
+}
+
+// TestFindImports tests that findImports works. See #43249.
+func TestFindImports(t *testing.T) {
+ imports, err := findImports("go/build")
+ if err != nil {
+ t.Fatal(err)
+ }
+ t.Logf("go/build imports %q", imports)
+ want := []string{"bytes", "os", "path/filepath", "strings"}
+wantLoop:
+ for _, w := range want {
+ for _, imp := range imports {
+ if imp == w {
+ continue wantLoop
+ }
+ }
+ t.Errorf("expected to find %q in import list", w)
+ }
+}
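
The tests above read import lists straight out of the source files with readGoInfo. For callers outside this package the same information is exposed through the package's public API; a minimal sketch (the import path "go/build" is just an example target):

package main

import (
	"fmt"
	"go/build"
	"log"
)

func main() {
	// Resolve the package in the default build context (build.Default).
	pkg, err := build.Import("go/build", "", 0)
	if err != nil {
		log.Fatal(err)
	}
	// pkg.Imports lists the import paths of the package's non-test Go files,
	// the same data findImports extracts above with readGoInfo.
	for _, imp := range pkg.Imports {
		fmt.Println(imp)
	}
}

Because build.Import resolves against build.Default, GOROOT, GOPATH, and the active build tags all apply to the result.
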
diff --git a/src/go/build/doc.go b/src/go/build/doc.go
new file mode 100644
index 0000000..2c6f0a8
--- /dev/null
+++ b/src/go/build/doc.go
@@ -0,0 +1,98 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package build gathers information about Go packages.
+//
+// Go Path
+//
+// The Go path is a list of directory trees containing Go source code.
+// It is consulted to resolve imports that cannot be found in the standard
+// Go tree. The default path is the value of the GOPATH environment
+// variable, interpreted as a path list appropriate to the operating system
+// (on Unix, the variable is a colon-separated string;
+// on Windows, a semicolon-separated string;
+// on Plan 9, a list).
+//
+// Each directory listed in the Go path must have a prescribed structure:
+//
+// The src/ directory holds source code. The path below 'src' determines
+// the import path or executable name.
+//
+// The pkg/ directory holds installed package objects.
+// As in the Go tree, each target operating system and
+// architecture pair has its own subdirectory of pkg
+// (pkg/GOOS_GOARCH).
+//
+// If DIR is a directory listed in the Go path, a package with
+// source in DIR/src/foo/bar can be imported as "foo/bar" and
+// has its compiled form installed to "DIR/pkg/GOOS_GOARCH/foo/bar.a"
+// (or, for gccgo, "DIR/pkg/gccgo/foo/libbar.a").
+//
+// The bin/ directory holds compiled commands.
+// Each command is named for its source directory, but only
+// using the final element, not the entire path. That is, the
+// command with source in DIR/src/foo/quux is installed into
+// DIR/bin/quux, not DIR/bin/foo/quux. The foo/ is stripped
+// so that you can add DIR/bin to your PATH to get at the
+// installed commands.
+//
+// Here's an example directory layout:
+//
+// GOPATH=/home/user/gocode
+//
+// /home/user/gocode/
+// src/
+// foo/
+// bar/ (go code in package bar)
+// x.go
+// quux/ (go code in package main)
+// y.go
+// bin/
+// quux (installed command)
+// pkg/
+// linux_amd64/
+// foo/
+// bar.a (installed package object)
+//
+// Build Constraints
+//
+// A build constraint, also known as a build tag, is a line comment that begins
+//
+// // +build
+//
+// that lists the conditions under which a file should be included in the
+// package. Build constraints may also be part of a file's name
+// (for example, source_windows.go will only be included if the target
+// operating system is windows).
+//
+// See 'go help buildconstraint'
+// (https://golang.org/cmd/go/#hdr-Build_constraints) for details.
+//
+// Binary-Only Packages
+//
+// In Go 1.12 and earlier, it was possible to distribute packages in binary
+// form without including the source code used for compiling the package.
+// The package was distributed with a source file not excluded by build
+// constraints and containing a "//go:binary-only-package" comment. Like a
+// build constraint, this comment appeared at the top of a file, preceded
+// only by blank lines and other line comments and with a blank line
+// following the comment, to separate it from the package documentation.
+// Unlike build constraints, this comment is only recognized in non-test
+// Go source files.
+//
+// The minimal source code for a binary-only package was therefore:
+//
+// //go:binary-only-package
+//
+// package mypkg
+//
+// The source code could include additional Go code. That code was never
+// compiled but would be processed by tools like godoc and might be useful
+// as end-user documentation.
+//
+// "go build" and other commands no longer support binary-only-packages.
+// Import and ImportDir will still set the BinaryOnly flag in packages
+// containing these comments for use in tools and error messages.
+//
+package build
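
The Build Constraints section above describes // +build lines and file-name suffixes but gives no worked example. A small sketch using the go/build/constraint package to parse and evaluate one such line; the tag set here is made up for illustration:

package main

import (
	"fmt"
	"go/build/constraint"
)

func main() {
	// "// +build linux,386 darwin" means (linux AND 386) OR darwin.
	expr, err := constraint.Parse("// +build linux,386 darwin")
	if err != nil {
		panic(err)
	}
	// Hypothetical tag set for illustration.
	tags := map[string]bool{"linux": true, "386": true}
	fmt.Println(expr.String())                                         // the parsed expression
	fmt.Println(expr.Eval(func(tag string) bool { return tags[tag] })) // true: linux and 386 are both set
}

The gc.go and gccgo.go files below are themselves examples of this mechanism: each carries a single build tag so that exactly one definition of getToolDir is compiled.
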
diff --git a/src/go/build/gc.go b/src/go/build/gc.go
new file mode 100644
index 0000000..3025cd5
--- /dev/null
+++ b/src/go/build/gc.go
@@ -0,0 +1,17 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build gc
+
+package build
+
+import (
+ "path/filepath"
+ "runtime"
+)
+
+// getToolDir returns the default value of ToolDir.
+func getToolDir() string {
+ return filepath.Join(runtime.GOROOT(), "pkg/tool/"+runtime.GOOS+"_"+runtime.GOARCH)
+}
diff --git a/src/go/build/gccgo.go b/src/go/build/gccgo.go
new file mode 100644
index 0000000..c6aac9a
--- /dev/null
+++ b/src/go/build/gccgo.go
@@ -0,0 +1,14 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build gccgo
+
+package build
+
+import "runtime"
+
+// getToolDir returns the default value of ToolDir.
+func getToolDir() string {
+ return envOr("GCCGOTOOLDIR", runtime.GCCGOTOOLDIR)
+}
diff --git a/src/go/build/read.go b/src/go/build/read.go
new file mode 100644
index 0000000..aa7c6ee
--- /dev/null
+++ b/src/go/build/read.go
@@ -0,0 +1,546 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package build
+
+import (
+ "bufio"
+ "errors"
+ "fmt"
+ "go/ast"
+ "go/parser"
+ "go/token"
+ "io"
+ "strconv"
+ "strings"
+ "unicode"
+ "unicode/utf8"
+)
+
+type importReader struct {
+ b *bufio.Reader
+ buf []byte
+ peek byte
+ err error
+ eof bool
+ nerr int
+ pos token.Position
+}
+
+func newImportReader(name string, r io.Reader) *importReader {
+ return &importReader{
+ b: bufio.NewReader(r),
+ pos: token.Position{
+ Filename: name,
+ Line: 1,
+ Column: 1,
+ },
+ }
+}
+
+func isIdent(c byte) bool {
+ return 'A' <= c && c <= 'Z' || 'a' <= c && c <= 'z' || '0' <= c && c <= '9' || c == '_' || c >= utf8.RuneSelf
+}
+
+var (
+ errSyntax = errors.New("syntax error")
+ errNUL = errors.New("unexpected NUL in input")
+)
+
+// syntaxError records a syntax error, but only if an I/O error has not already been recorded.
+func (r *importReader) syntaxError() {
+ if r.err == nil {
+ r.err = errSyntax
+ }
+}
+
+// readByte reads the next byte from the input, saves it in buf, and returns it.
+// If an error occurs, readByte records the error in r.err and returns 0.
+func (r *importReader) readByte() byte {
+ c, err := r.b.ReadByte()
+ if err == nil {
+ r.buf = append(r.buf, c)
+ if c == 0 {
+ err = errNUL
+ }
+ }
+ if err != nil {
+ if err == io.EOF {
+ r.eof = true
+ } else if r.err == nil {
+ r.err = err
+ }
+ c = 0
+ }
+ return c
+}
+
+// readByteNoBuf is like readByte but doesn't buffer the byte.
+// It exhausts r.buf before reading from r.b.
+func (r *importReader) readByteNoBuf() byte {
+ var c byte
+ var err error
+ if len(r.buf) > 0 {
+ c = r.buf[0]
+ r.buf = r.buf[1:]
+ } else {
+ c, err = r.b.ReadByte()
+ if err == nil && c == 0 {
+ err = errNUL
+ }
+ }
+
+ if err != nil {
+ if err == io.EOF {
+ r.eof = true
+ } else if r.err == nil {
+ r.err = err
+ }
+ return 0
+ }
+ r.pos.Offset++
+ if c == '\n' {
+ r.pos.Line++
+ r.pos.Column = 1
+ } else {
+ r.pos.Column++
+ }
+ return c
+}
+
+// peekByte returns the next byte from the input reader but does not advance beyond it.
+// If skipSpace is set, peekByte skips leading spaces and comments.
+func (r *importReader) peekByte(skipSpace bool) byte {
+ if r.err != nil {
+ if r.nerr++; r.nerr > 10000 {
+ panic("go/build: import reader looping")
+ }
+ return 0
+ }
+
+ // Use r.peek as first input byte.
+ // Don't just return r.peek here: it might have been left by peekByte(false)
+ // and this might be peekByte(true).
+ c := r.peek
+ if c == 0 {
+ c = r.readByte()
+ }
+ for r.err == nil && !r.eof {
+ if skipSpace {
+ // For the purposes of this reader, semicolons are never necessary to
+ // understand the input and are treated as spaces.
+ switch c {
+ case ' ', '\f', '\t', '\r', '\n', ';':
+ c = r.readByte()
+ continue
+
+ case '/':
+ c = r.readByte()
+ if c == '/' {
+ for c != '\n' && r.err == nil && !r.eof {
+ c = r.readByte()
+ }
+ } else if c == '*' {
+ var c1 byte
+ for (c != '*' || c1 != '/') && r.err == nil {
+ if r.eof {
+ r.syntaxError()
+ }
+ c, c1 = c1, r.readByte()
+ }
+ } else {
+ r.syntaxError()
+ }
+ c = r.readByte()
+ continue
+ }
+ }
+ break
+ }
+ r.peek = c
+ return r.peek
+}
+
+// nextByte is like peekByte but advances beyond the returned byte.
+func (r *importReader) nextByte(skipSpace bool) byte {
+ c := r.peekByte(skipSpace)
+ r.peek = 0
+ return c
+}
+
+var goEmbed = []byte("go:embed")
+
+// findEmbed advances the input reader to the next //go:embed comment.
+// It reports whether it found a comment.
+// (Otherwise it found an error or EOF.)
+func (r *importReader) findEmbed(first bool) bool {
+ // The import block scan stopped after a non-space character,
+ // so the reader is not at the start of a line on the first call.
+ // After that, each //go:embed extraction leaves the reader
+ // at the end of a line.
+ startLine := !first
+ var c byte
+ for r.err == nil && !r.eof {
+ c = r.readByteNoBuf()
+ Reswitch:
+ switch c {
+ default:
+ startLine = false
+
+ case '\n':
+ startLine = true
+
+ case ' ', '\t':
+ // leave startLine alone
+
+ case '"':
+ startLine = false
+ for r.err == nil {
+ if r.eof {
+ r.syntaxError()
+ }
+ c = r.readByteNoBuf()
+ if c == '\\' {
+ r.readByteNoBuf()
+ if r.err != nil {
+ r.syntaxError()
+ return false
+ }
+ continue
+ }
+ if c == '"' {
+ c = r.readByteNoBuf()
+ goto Reswitch
+ }
+ }
+ goto Reswitch
+
+ case '`':
+ startLine = false
+ for r.err == nil {
+ if r.eof {
+ r.syntaxError()
+ }
+ c = r.readByteNoBuf()
+ if c == '`' {
+ c = r.readByteNoBuf()
+ goto Reswitch
+ }
+ }
+
+ case '/':
+ c = r.readByteNoBuf()
+ switch c {
+ default:
+ startLine = false
+ goto Reswitch
+
+ case '*':
+ var c1 byte
+ for (c != '*' || c1 != '/') && r.err == nil {
+ if r.eof {
+ r.syntaxError()
+ }
+ c, c1 = c1, r.readByteNoBuf()
+ }
+ startLine = false
+
+ case '/':
+ if startLine {
+ // Try to read this as a //go:embed comment.
+ for i := range goEmbed {
+ c = r.readByteNoBuf()
+ if c != goEmbed[i] {
+ goto SkipSlashSlash
+ }
+ }
+ c = r.readByteNoBuf()
+ if c == ' ' || c == '\t' {
+ // Found one!
+ return true
+ }
+ }
+ SkipSlashSlash:
+ for c != '\n' && r.err == nil && !r.eof {
+ c = r.readByteNoBuf()
+ }
+ startLine = true
+ }
+ }
+ }
+ return false
+}
+
+// readKeyword reads the given keyword from the input.
+// If the keyword is not present, readKeyword records a syntax error.
+func (r *importReader) readKeyword(kw string) {
+ r.peekByte(true)
+ for i := 0; i < len(kw); i++ {
+ if r.nextByte(false) != kw[i] {
+ r.syntaxError()
+ return
+ }
+ }
+ if isIdent(r.peekByte(false)) {
+ r.syntaxError()
+ }
+}
+
+// readIdent reads an identifier from the input.
+// If an identifier is not present, readIdent records a syntax error.
+func (r *importReader) readIdent() {
+ c := r.peekByte(true)
+ if !isIdent(c) {
+ r.syntaxError()
+ return
+ }
+ for isIdent(r.peekByte(false)) {
+ r.peek = 0
+ }
+}
+
+// readString reads a quoted string literal from the input.
+// If a quoted string is not present, readString records a syntax error.
+func (r *importReader) readString() {
+ switch r.nextByte(true) {
+ case '`':
+ for r.err == nil {
+ if r.nextByte(false) == '`' {
+ break
+ }
+ if r.eof {
+ r.syntaxError()
+ }
+ }
+ case '"':
+ for r.err == nil {
+ c := r.nextByte(false)
+ if c == '"' {
+ break
+ }
+ if r.eof || c == '\n' {
+ r.syntaxError()
+ }
+ if c == '\\' {
+ r.nextByte(false)
+ }
+ }
+ default:
+ r.syntaxError()
+ }
+}
+
+// readImport reads an import clause - optional identifier followed by quoted string -
+// from the input.
+func (r *importReader) readImport() {
+ c := r.peekByte(true)
+ if c == '.' {
+ r.peek = 0
+ } else if isIdent(c) {
+ r.readIdent()
+ }
+ r.readString()
+}
+
+// readComments is like io.ReadAll, except that it only reads the leading
+// block of comments in the file.
+func readComments(f io.Reader) ([]byte, error) {
+ r := newImportReader("", f)
+ r.peekByte(true)
+ if r.err == nil && !r.eof {
+ // Didn't reach EOF, so must have found a non-space byte. Remove it.
+ r.buf = r.buf[:len(r.buf)-1]
+ }
+ return r.buf, r.err
+}
+
+// readGoInfo expects a Go file as input and reads the file up to and including the import section.
+// It records what it learned in *info.
+// If info.fset is non-nil, readGoInfo parses the file and sets info.parsed, info.parseErr,
+// info.imports, info.embeds, and info.embedErr.
+//
+// It only returns an error if there are problems reading the file,
+// not for syntax errors in the file itself.
+func readGoInfo(f io.Reader, info *fileInfo) error {
+ r := newImportReader(info.name, f)
+
+ r.readKeyword("package")
+ r.readIdent()
+ for r.peekByte(true) == 'i' {
+ r.readKeyword("import")
+ if r.peekByte(true) == '(' {
+ r.nextByte(false)
+ for r.peekByte(true) != ')' && r.err == nil {
+ r.readImport()
+ }
+ r.nextByte(false)
+ } else {
+ r.readImport()
+ }
+ }
+
+ info.header = r.buf
+
+ // If we stopped successfully before EOF, we read a byte that told us we were done.
+ // Return all but that last byte, which would cause a syntax error if we let it through.
+ if r.err == nil && !r.eof {
+ info.header = r.buf[:len(r.buf)-1]
+ }
+
+ // If we stopped for a syntax error, consume the whole file so that
+ // we are sure we don't change the errors that go/parser returns.
+ if r.err == errSyntax {
+ r.err = nil
+ for r.err == nil && !r.eof {
+ r.readByte()
+ }
+ info.header = r.buf
+ }
+ if r.err != nil {
+ return r.err
+ }
+
+ if info.fset == nil {
+ return nil
+ }
+
+ // Parse file header & record imports.
+ info.parsed, info.parseErr = parser.ParseFile(info.fset, info.name, info.header, parser.ImportsOnly|parser.ParseComments)
+ if info.parseErr != nil {
+ return nil
+ }
+
+ hasEmbed := false
+ for _, decl := range info.parsed.Decls {
+ d, ok := decl.(*ast.GenDecl)
+ if !ok {
+ continue
+ }
+ for _, dspec := range d.Specs {
+ spec, ok := dspec.(*ast.ImportSpec)
+ if !ok {
+ continue
+ }
+ quoted := spec.Path.Value
+ path, err := strconv.Unquote(quoted)
+ if err != nil {
+ return fmt.Errorf("parser returned invalid quoted string: <%s>", quoted)
+ }
+ if path == "embed" {
+ hasEmbed = true
+ }
+
+ doc := spec.Doc
+ if doc == nil && len(d.Specs) == 1 {
+ doc = d.Doc
+ }
+ info.imports = append(info.imports, fileImport{path, spec.Pos(), doc})
+ }
+ }
+
+ // If the file imports "embed",
+ // we have to look for //go:embed comments
+ // in the remainder of the file.
+ // The compiler will enforce the mapping of comments to
+ // declared variables. We just need to know the patterns.
+ // If there were //go:embed comments earlier in the file
+ // (near the package statement or imports), the compiler
+ // will reject them. They can be (and have already been) ignored.
+ if hasEmbed {
+ var line []byte
+ for first := true; r.findEmbed(first); first = false {
+ line = line[:0]
+ pos := r.pos
+ for {
+ c := r.readByteNoBuf()
+ if c == '\n' || r.err != nil || r.eof {
+ break
+ }
+ line = append(line, c)
+ }
+ // Add args if line is well-formed.
+ // Ignore badly-formed lines - the compiler will report them when it finds them,
+ // and we can pretend they are not there to help go list succeed with what it knows.
+ embs, err := parseGoEmbed(string(line), pos)
+ if err == nil {
+ info.embeds = append(info.embeds, embs...)
+ }
+ }
+ }
+
+ return nil
+}
+
+// parseGoEmbed parses the text following "//go:embed" to extract the glob patterns.
+// It accepts unquoted space-separated patterns as well as double-quoted and back-quoted Go strings.
+// This is based on a similar function in cmd/compile/internal/gc/noder.go;
+// this version calculates position information as well.
+func parseGoEmbed(args string, pos token.Position) ([]fileEmbed, error) {
+ trimBytes := func(n int) {
+ pos.Offset += n
+ pos.Column += utf8.RuneCountInString(args[:n])
+ args = args[n:]
+ }
+ trimSpace := func() {
+ trim := strings.TrimLeftFunc(args, unicode.IsSpace)
+ trimBytes(len(args) - len(trim))
+ }
+
+ var list []fileEmbed
+ for trimSpace(); args != ""; trimSpace() {
+ var path string
+ pathPos := pos
+ Switch:
+ switch args[0] {
+ default:
+ i := len(args)
+ for j, c := range args {
+ if unicode.IsSpace(c) {
+ i = j
+ break
+ }
+ }
+ path = args[:i]
+ trimBytes(i)
+
+ case '`':
+ i := strings.Index(args[1:], "`")
+ if i < 0 {
+ return nil, fmt.Errorf("invalid quoted string in //go:embed: %s", args)
+ }
+ path = args[1 : 1+i]
+ trimBytes(1 + i + 1)
+
+ case '"':
+ i := 1
+ for ; i < len(args); i++ {
+ if args[i] == '\\' {
+ i++
+ continue
+ }
+ if args[i] == '"' {
+ q, err := strconv.Unquote(args[:i+1])
+ if err != nil {
+ return nil, fmt.Errorf("invalid quoted string in //go:embed: %s", args[:i+1])
+ }
+ path = q
+ trimBytes(i + 1)
+ break Switch
+ }
+ }
+ if i >= len(args) {
+ return nil, fmt.Errorf("invalid quoted string in //go:embed: %s", args)
+ }
+ }
+
+ if args != "" {
+ r, _ := utf8.DecodeRuneInString(args)
+ if !unicode.IsSpace(r) {
+ return nil, fmt.Errorf("invalid quoted string in //go:embed: %s", args)
+ }
+ }
+ list = append(list, fileEmbed{path, pathPos})
+ }
+ return list, nil
+}
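
parseGoEmbed is unexported, so it can only be exercised from inside package build. A hypothetical test-style sketch (it would have to sit alongside read_test.go, which follows and already imports go/token and testing) showing how one //go:embed argument line is split into patterns:

func TestParseGoEmbedSketch(t *testing.T) {
	// Position of the first pattern character; the values are arbitrary here.
	pos := token.Position{Filename: "x.go", Line: 3, Column: 12}
	embs, err := parseGoEmbed(`images/*.png "a b.txt"`, pos)
	if err != nil {
		t.Fatal(err)
	}
	for _, e := range embs {
		t.Log(e.pattern) // "images/*.png", then "a b.txt"
	}
}
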
diff --git a/src/go/build/read_test.go b/src/go/build/read_test.go
new file mode 100644
index 0000000..32e6bae
--- /dev/null
+++ b/src/go/build/read_test.go
@@ -0,0 +1,321 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package build
+
+import (
+ "fmt"
+ "go/token"
+ "io"
+ "strings"
+ "testing"
+)
+
+const quote = "`"
+
+type readTest struct {
+ // Test input contains ℙ where readGoInfo should stop.
+ in string
+ err string
+}
+
+var readGoInfoTests = []readTest{
+ {
+ `package p`,
+ "",
+ },
+ {
+ `package p; import "x"`,
+ "",
+ },
+ {
+ `package p; import . "x"`,
+ "",
+ },
+ {
+ `package p; import "x";ℙvar x = 1`,
+ "",
+ },
+ {
+ `package p
+
+ // comment
+
+ import "x"
+ import _ "x"
+ import a "x"
+
+ /* comment */
+
+ import (
+ "x" /* comment */
+ _ "x"
+ a "x" // comment
+ ` + quote + `x` + quote + `
+ _ /*comment*/ ` + quote + `x` + quote + `
+ a ` + quote + `x` + quote + `
+ )
+ import (
+ )
+ import ()
+ import()import()import()
+ import();import();import()
+
+ ℙvar x = 1
+ `,
+ "",
+ },
+}
+
+var readCommentsTests = []readTest{
+ {
+ `ℙpackage p`,
+ "",
+ },
+ {
+ `ℙpackage p; import "x"`,
+ "",
+ },
+ {
+ `ℙpackage p; import . "x"`,
+ "",
+ },
+ {
+ `// foo
+
+ /* bar */
+
+ /* quux */ // baz
+
+ /*/ zot */
+
+ // asdf
+ ℙHello, world`,
+ "",
+ },
+}
+
+func testRead(t *testing.T, tests []readTest, read func(io.Reader) ([]byte, error)) {
+ for i, tt := range tests {
+ var in, testOut string
+ j := strings.Index(tt.in, "ℙ")
+ if j < 0 {
+ in = tt.in
+ testOut = tt.in
+ } else {
+ in = tt.in[:j] + tt.in[j+len("ℙ"):]
+ testOut = tt.in[:j]
+ }
+ r := strings.NewReader(in)
+ buf, err := read(r)
+ if err != nil {
+ if tt.err == "" {
+ t.Errorf("#%d: err=%q, expected success (%q)", i, err, string(buf))
+ } else if !strings.Contains(err.Error(), tt.err) {
+ t.Errorf("#%d: err=%q, expected %q", i, err, tt.err)
+ }
+ continue
+ }
+ if tt.err != "" {
+ t.Errorf("#%d: success, expected %q", i, tt.err)
+ continue
+ }
+
+ out := string(buf)
+ if out != testOut {
+ t.Errorf("#%d: wrong output:\nhave %q\nwant %q\n", i, out, testOut)
+ }
+ }
+}
+
+func TestReadGoInfo(t *testing.T) {
+ testRead(t, readGoInfoTests, func(r io.Reader) ([]byte, error) {
+ var info fileInfo
+ err := readGoInfo(r, &info)
+ return info.header, err
+ })
+}
+
+func TestReadComments(t *testing.T) {
+ testRead(t, readCommentsTests, readComments)
+}
+
+var readFailuresTests = []readTest{
+ {
+ `package`,
+ "syntax error",
+ },
+ {
+ "package p\n\x00\nimport `math`\n",
+ "unexpected NUL in input",
+ },
+ {
+ `package p; import`,
+ "syntax error",
+ },
+ {
+ `package p; import "`,
+ "syntax error",
+ },
+ {
+ "package p; import ` \n\n",
+ "syntax error",
+ },
+ {
+ `package p; import "x`,
+ "syntax error",
+ },
+ {
+ `package p; import _`,
+ "syntax error",
+ },
+ {
+ `package p; import _ "`,
+ "syntax error",
+ },
+ {
+ `package p; import _ "x`,
+ "syntax error",
+ },
+ {
+ `package p; import .`,
+ "syntax error",
+ },
+ {
+ `package p; import . "`,
+ "syntax error",
+ },
+ {
+ `package p; import . "x`,
+ "syntax error",
+ },
+ {
+ `package p; import (`,
+ "syntax error",
+ },
+ {
+ `package p; import ("`,
+ "syntax error",
+ },
+ {
+ `package p; import ("x`,
+ "syntax error",
+ },
+ {
+ `package p; import ("x"`,
+ "syntax error",
+ },
+}
+
+func TestReadFailuresIgnored(t *testing.T) {
+ // readGoInfo should not report syntax errors.
+ // Instead, the entire file should be returned as output, with no error.
+ // Convert the test cases so they no longer expect syntax errors.
+ tests := make([]readTest, len(readFailuresTests))
+ copy(tests, readFailuresTests)
+ for i := range tests {
+ tt := &tests[i]
+ if !strings.Contains(tt.err, "NUL") {
+ tt.err = ""
+ }
+ }
+ testRead(t, tests, func(r io.Reader) ([]byte, error) {
+ var info fileInfo
+ err := readGoInfo(r, &info)
+ return info.header, err
+ })
+}
+
+var readEmbedTests = []struct {
+ in, out string
+}{
+ {
+ "package p\n",
+ "",
+ },
+ {
+ "package p\nimport \"embed\"\nvar i int\n//go:embed x y z\nvar files embed.FS",
+ `test:4:12:x
+ test:4:14:y
+ test:4:16:z`,
+ },
+ {
+ "package p\nimport \"embed\"\nvar i int\n//go:embed x \"\\x79\" `z`\nvar files embed.FS",
+ `test:4:12:x
+ test:4:14:y
+ test:4:21:z`,
+ },
+ {
+ "package p\nimport \"embed\"\nvar i int\n//go:embed x y\n//go:embed z\nvar files embed.FS",
+ `test:4:12:x
+ test:4:14:y
+ test:5:12:z`,
+ },
+ {
+ "package p\nimport \"embed\"\nvar i int\n\t //go:embed x y\n\t //go:embed z\n\t var files embed.FS",
+ `test:4:14:x
+ test:4:16:y
+ test:5:14:z`,
+ },
+ {
+ "package p\nimport \"embed\"\n//go:embed x y z\nvar files embed.FS",
+ `test:3:12:x
+ test:3:14:y
+ test:3:16:z`,
+ },
+ {
+ "package p\nimport \"embed\"\nvar s = \"/*\"\n//go:embed x\nvar files embed.FS",
+ `test:4:12:x`,
+ },
+ {
+ `package p
+ import "embed"
+ var s = "\"\\\\"
+ //go:embed x
+ var files embed.FS`,
+ `test:4:15:x`,
+ },
+ {
+ "package p\nimport \"embed\"\nvar s = `/*`\n//go:embed x\nvar files embed.FS",
+ `test:4:12:x`,
+ },
+ {
+ "package p\nimport \"embed\"\nvar s = z/ *y\n//go:embed pointer\nvar pointer embed.FS",
+ "test:4:12:pointer",
+ },
+ {
+ "package p\n//go:embed x y z\n", // no import, no scan
+ "",
+ },
+ {
+ "package p\n//go:embed x y z\nvar files embed.FS", // no import, no scan
+ "",
+ },
+}
+
+func TestReadEmbed(t *testing.T) {
+ fset := token.NewFileSet()
+ for i, tt := range readEmbedTests {
+ info := fileInfo{
+ name: "test",
+ fset: fset,
+ }
+ err := readGoInfo(strings.NewReader(tt.in), &info)
+ if err != nil {
+ t.Errorf("#%d: %v", i, err)
+ continue
+ }
+ b := &strings.Builder{}
+ sep := ""
+ for _, emb := range info.embeds {
+ fmt.Fprintf(b, "%s%v:%s", sep, emb.pos, emb.pattern)
+ sep = "\n"
+ }
+ got := b.String()
+ want := strings.Join(strings.Fields(tt.out), "\n")
+ if got != want {
+ t.Errorf("#%d: embeds:\n%s\nwant:\n%s", i, got, want)
+ }
+ }
+}
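
At the public API level, the //go:embed patterns that readGoInfo collects are reported on the Package type. A short sketch, assuming a hypothetical directory ./assets-demo whose Go files import "embed" and carry //go:embed comments:

package main

import (
	"fmt"
	"go/build"
	"log"
)

func main() {
	// ./assets-demo is a made-up directory used only for illustration.
	pkg, err := build.ImportDir("./assets-demo", 0)
	if err != nil {
		log.Fatal(err)
	}
	// The //go:embed patterns collected by readGoInfo surface here.
	fmt.Println(pkg.EmbedPatterns)
}
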
diff --git a/src/go/build/syslist.go b/src/go/build/syslist.go
new file mode 100644
index 0000000..1275f7c
--- /dev/null
+++ b/src/go/build/syslist.go
@@ -0,0 +1,11 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package build
+
+// List of past, present, and future known GOOS and GOARCH values.
+// Do not remove from this list, as these are used for go/build filename matching.
+
+const goosList = "aix android darwin dragonfly freebsd hurd illumos ios js linux nacl netbsd openbsd plan9 solaris windows zos "
+const goarchList = "386 amd64 amd64p32 arm armbe arm64 arm64be ppc64 ppc64le mips mipsle mips64 mips64le mips64p32 mips64p32le ppc riscv riscv64 s390 s390x sparc sparc64 wasm "
diff --git a/src/go/build/syslist_test.go b/src/go/build/syslist_test.go
new file mode 100644
index 0000000..2b7b4c7
--- /dev/null
+++ b/src/go/build/syslist_test.go
@@ -0,0 +1,62 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package build
+
+import (
+ "runtime"
+ "testing"
+)
+
+var (
+ thisOS = runtime.GOOS
+ thisArch = runtime.GOARCH
+ otherOS = anotherOS()
+ otherArch = anotherArch()
+)
+
+func anotherOS() string {
+ if thisOS != "darwin" && thisOS != "ios" {
+ return "darwin"
+ }
+ return "linux"
+}
+
+func anotherArch() string {
+ if thisArch != "amd64" {
+ return "amd64"
+ }
+ return "386"
+}
+
+type GoodFileTest struct {
+ name string
+ result bool
+}
+
+var tests = []GoodFileTest{
+ {"file.go", true},
+ {"file.c", true},
+ {"file_foo.go", true},
+ {"file_" + thisArch + ".go", true},
+ {"file_" + otherArch + ".go", false},
+ {"file_" + thisOS + ".go", true},
+ {"file_" + otherOS + ".go", false},
+ {"file_" + thisOS + "_" + thisArch + ".go", true},
+ {"file_" + otherOS + "_" + thisArch + ".go", false},
+ {"file_" + thisOS + "_" + otherArch + ".go", false},
+ {"file_" + otherOS + "_" + otherArch + ".go", false},
+ {"file_foo_" + thisArch + ".go", true},
+ {"file_foo_" + otherArch + ".go", false},
+ {"file_" + thisOS + ".c", true},
+ {"file_" + otherOS + ".c", false},
+}
+
+func TestGoodOSArch(t *testing.T) {
+ for _, test := range tests {
+ if Default.goodOSArchFile(test.name, make(map[string]bool)) != test.result {
+ t.Fatalf("goodOSArchFile(%q) != %v", test.name, test.result)
+ }
+ }
+}
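
The GOOS/GOARCH file-name matching this test exercises internally is also reachable through Context.MatchFile. A sketch with a stubbed OpenFile so that only the file name decides the outcome (the file contents below are a stand-in):

package main

import (
	"fmt"
	"go/build"
	"io"
	"strings"
)

func main() {
	ctxt := build.Default
	ctxt.GOOS = "linux"
	ctxt.GOARCH = "amd64"
	// Stub file access: every file "contains" a minimal Go source,
	// so only the name-based GOOS/GOARCH rules decide the result.
	ctxt.OpenFile = func(path string) (io.ReadCloser, error) {
		return io.NopCloser(strings.NewReader("package p\n")), nil
	}
	for _, name := range []string{
		"file_linux_amd64.go", // matching GOOS and GOARCH
		"file_windows.go",     // wrong GOOS
		"file_arm64.go",       // wrong GOARCH
	} {
		ok, err := ctxt.MatchFile(".", name)
		fmt.Println(name, ok, err)
	}
}
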
diff --git a/src/go/build/testdata/cgo_disabled/cgo_disabled.go b/src/go/build/testdata/cgo_disabled/cgo_disabled.go
new file mode 100644
index 0000000..d1edb99
--- /dev/null
+++ b/src/go/build/testdata/cgo_disabled/cgo_disabled.go
@@ -0,0 +1,5 @@
+package cgo_disabled
+
+import "C"
+
+import _ "should/be/ignored"
diff --git a/src/go/build/testdata/cgo_disabled/empty.go b/src/go/build/testdata/cgo_disabled/empty.go
new file mode 100644
index 0000000..63afe42
--- /dev/null
+++ b/src/go/build/testdata/cgo_disabled/empty.go
@@ -0,0 +1 @@
+package cgo_disabled
diff --git a/src/go/build/testdata/doc/a_test.go b/src/go/build/testdata/doc/a_test.go
new file mode 100644
index 0000000..1c07b56
--- /dev/null
+++ b/src/go/build/testdata/doc/a_test.go
@@ -0,0 +1,2 @@
+// Doc from xtests
+package doc_test
diff --git a/src/go/build/testdata/doc/b_test.go b/src/go/build/testdata/doc/b_test.go
new file mode 100644
index 0000000..0cf1605
--- /dev/null
+++ b/src/go/build/testdata/doc/b_test.go
@@ -0,0 +1 @@
+package doc_test
diff --git a/src/go/build/testdata/doc/c_test.go b/src/go/build/testdata/doc/c_test.go
new file mode 100644
index 0000000..1025707
--- /dev/null
+++ b/src/go/build/testdata/doc/c_test.go
@@ -0,0 +1 @@
+package doc
diff --git a/src/go/build/testdata/doc/d_test.go b/src/go/build/testdata/doc/d_test.go
new file mode 100644
index 0000000..ec19564
--- /dev/null
+++ b/src/go/build/testdata/doc/d_test.go
@@ -0,0 +1,2 @@
+// Doc from regular tests.
+package doc
diff --git a/src/go/build/testdata/doc/e.go b/src/go/build/testdata/doc/e.go
new file mode 100644
index 0000000..1025707
--- /dev/null
+++ b/src/go/build/testdata/doc/e.go
@@ -0,0 +1 @@
+package doc
diff --git a/src/go/build/testdata/doc/f.go b/src/go/build/testdata/doc/f.go
new file mode 100644
index 0000000..ab1d0bc
--- /dev/null
+++ b/src/go/build/testdata/doc/f.go
@@ -0,0 +1,2 @@
+// Correct
+package doc
diff --git a/src/go/build/testdata/empty/dummy b/src/go/build/testdata/empty/dummy
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/go/build/testdata/empty/dummy
diff --git a/src/go/build/testdata/multi/file.go b/src/go/build/testdata/multi/file.go
new file mode 100644
index 0000000..ee946eb
--- /dev/null
+++ b/src/go/build/testdata/multi/file.go
@@ -0,0 +1,5 @@
+// Test data - not compiled.
+
+package main
+
+func main() {}
diff --git a/src/go/build/testdata/multi/file_appengine.go b/src/go/build/testdata/multi/file_appengine.go
new file mode 100644
index 0000000..4ea31e7
--- /dev/null
+++ b/src/go/build/testdata/multi/file_appengine.go
@@ -0,0 +1,5 @@
+// Test data - not compiled.
+
+package test_package
+
+func init() {}
diff --git a/src/go/build/testdata/other/file/file.go b/src/go/build/testdata/other/file/file.go
new file mode 100644
index 0000000..bbfd3e9
--- /dev/null
+++ b/src/go/build/testdata/other/file/file.go
@@ -0,0 +1,5 @@
+// Test data - not compiled.
+
+package file
+
+func F() {}
diff --git a/src/go/build/testdata/other/main.go b/src/go/build/testdata/other/main.go
new file mode 100644
index 0000000..e090435
--- /dev/null
+++ b/src/go/build/testdata/other/main.go
@@ -0,0 +1,11 @@
+// Test data - not compiled.
+
+package main
+
+import (
+ "./file"
+)
+
+func main() {
+ file.F()
+}
diff --git a/src/go/build/testdata/withvendor/src/a/b/b.go b/src/go/build/testdata/withvendor/src/a/b/b.go
new file mode 100644
index 0000000..4405d54
--- /dev/null
+++ b/src/go/build/testdata/withvendor/src/a/b/b.go
@@ -0,0 +1,3 @@
+package b
+
+import _ "c/d"
diff --git a/src/go/build/testdata/withvendor/src/a/vendor/c/d/d.go b/src/go/build/testdata/withvendor/src/a/vendor/c/d/d.go
new file mode 100644
index 0000000..142fb42
--- /dev/null
+++ b/src/go/build/testdata/withvendor/src/a/vendor/c/d/d.go
@@ -0,0 +1 @@
+package d