path: root/src/go/build
Diffstat (limited to 'src/go/build')
-rw-r--r--  src/go/build/build.go  2012
-rw-r--r--  src/go/build/build_test.go  803
-rw-r--r--  src/go/build/constraint/expr.go  574
-rw-r--r--  src/go/build/constraint/expr_test.go  321
-rw-r--r--  src/go/build/deps_test.go  774
-rw-r--r--  src/go/build/doc.go  98
-rw-r--r--  src/go/build/gc.go  17
-rw-r--r--  src/go/build/gccgo.go  14
-rw-r--r--  src/go/build/read.go  600
-rw-r--r--  src/go/build/read_test.go  352
-rw-r--r--  src/go/build/syslist.go  80
-rw-r--r--  src/go/build/syslist_test.go  62
-rw-r--r--  src/go/build/testdata/alltags/alltags.go  5
-rw-r--r--  src/go/build/testdata/alltags/x_netbsd_arm.go  5
-rw-r--r--  src/go/build/testdata/bads/bad.s  1
-rw-r--r--  src/go/build/testdata/cgo_disabled/cgo_disabled.go  5
-rw-r--r--  src/go/build/testdata/cgo_disabled/empty.go  1
-rw-r--r--  src/go/build/testdata/doc/a_test.go  2
-rw-r--r--  src/go/build/testdata/doc/b_test.go  1
-rw-r--r--  src/go/build/testdata/doc/c_test.go  1
-rw-r--r--  src/go/build/testdata/doc/d_test.go  2
-rw-r--r--  src/go/build/testdata/doc/e.go  1
-rw-r--r--  src/go/build/testdata/doc/f.go  2
-rw-r--r--  src/go/build/testdata/empty/dummy  0
-rw-r--r--  src/go/build/testdata/multi/file.go  5
-rw-r--r--  src/go/build/testdata/multi/file_appengine.go  5
-rw-r--r--  src/go/build/testdata/non_source_tags/non_source_tags.go  5
-rw-r--r--  src/go/build/testdata/non_source_tags/x_arm.go.ignore  5
-rw-r--r--  src/go/build/testdata/other/file/file.go  5
-rw-r--r--  src/go/build/testdata/other/main.go  11
-rw-r--r--  src/go/build/testdata/withvendor/src/a/b/b.go  3
-rw-r--r--  src/go/build/testdata/withvendor/src/a/vendor/c/d/d.go  1
32 files changed, 5773 insertions, 0 deletions
diff --git a/src/go/build/build.go b/src/go/build/build.go
new file mode 100644
index 0000000..420873c
--- /dev/null
+++ b/src/go/build/build.go
@@ -0,0 +1,2012 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package build
+
+import (
+ "bytes"
+ "errors"
+ "fmt"
+ "go/ast"
+ "go/build/constraint"
+ "go/doc"
+ "go/token"
+ "internal/buildcfg"
+ "internal/godebug"
+ "internal/goroot"
+ "internal/goversion"
+ "io"
+ "io/fs"
+ "os"
+ "os/exec"
+ pathpkg "path"
+ "path/filepath"
+ "runtime"
+ "sort"
+ "strconv"
+ "strings"
+ "unicode"
+ "unicode/utf8"
+)
+
+// A Context specifies the supporting context for a build.
+type Context struct {
+ GOARCH string // target architecture
+ GOOS string // target operating system
+ GOROOT string // Go root
+ GOPATH string // Go paths
+
+ // Dir is the caller's working directory, or the empty string to use
+ // the current directory of the running process. In module mode, this is used
+ // to locate the main module.
+ //
+ // If Dir is non-empty, directories passed to Import and ImportDir must
+ // be absolute.
+ Dir string
+
+ CgoEnabled bool // whether cgo files are included
+ UseAllFiles bool // use files regardless of go:build lines, file names
+ Compiler string // compiler to assume when computing target paths
+
+ // The build, tool, and release tags specify build constraints
+ // that should be considered satisfied when processing go:build lines.
+ // Clients creating a new context may customize BuildTags, which
+ // defaults to empty, but it is usually an error to customize ToolTags or ReleaseTags.
+ // ToolTags defaults to build tags appropriate to the current Go toolchain configuration.
+ // ReleaseTags defaults to the list of Go releases the current release is compatible with.
+ // BuildTags is not set for the Default build Context.
+ // In addition to the BuildTags, ToolTags, and ReleaseTags, build constraints
+ // consider the values of GOARCH and GOOS as satisfied tags.
+ // The last element in ReleaseTags is assumed to be the current release.
+ BuildTags []string
+ ToolTags []string
+ ReleaseTags []string
+
+ // The install suffix specifies a suffix to use in the name of the installation
+ // directory. By default it is empty, but custom builds that need to keep
+ // their outputs separate can set InstallSuffix to do so. For example, when
+ // using the race detector, the go command uses InstallSuffix = "race", so
+ // that on a Linux/386 system, packages are written to a directory named
+ // "linux_386_race" instead of the usual "linux_386".
+ InstallSuffix string
+
+ // By default, Import uses the operating system's file system calls
+ // to read directories and files. To read from other sources,
+ // callers can set the following functions. They all have default
+ // behaviors that use the local file system, so clients need only set
+ // the functions whose behaviors they wish to change.
+
+ // JoinPath joins the sequence of path fragments into a single path.
+ // If JoinPath is nil, Import uses filepath.Join.
+ JoinPath func(elem ...string) string
+
+ // SplitPathList splits the path list into a slice of individual paths.
+ // If SplitPathList is nil, Import uses filepath.SplitList.
+ SplitPathList func(list string) []string
+
+ // IsAbsPath reports whether path is an absolute path.
+ // If IsAbsPath is nil, Import uses filepath.IsAbs.
+ IsAbsPath func(path string) bool
+
+ // IsDir reports whether the path names a directory.
+ // If IsDir is nil, Import calls os.Stat and uses the result's IsDir method.
+ IsDir func(path string) bool
+
+ // HasSubdir reports whether dir is lexically a subdirectory of
+ // root, perhaps multiple levels below. It does not try to check
+ // whether dir exists.
+ // If so, HasSubdir sets rel to a slash-separated path that
+ // can be joined to root to produce a path equivalent to dir.
+ // If HasSubdir is nil, Import uses an implementation built on
+ // filepath.EvalSymlinks.
+ HasSubdir func(root, dir string) (rel string, ok bool)
+
+ // ReadDir returns a slice of fs.FileInfo, sorted by Name,
+ // describing the content of the named directory.
+ // If ReadDir is nil, Import uses os.ReadDir.
+ ReadDir func(dir string) ([]fs.FileInfo, error)
+
+ // OpenFile opens a file (not a directory) for reading.
+ // If OpenFile is nil, Import uses os.Open.
+ OpenFile func(path string) (io.ReadCloser, error)
+}
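+
+// For illustration, a minimal sketch of how a caller might point a Context at
+// an in-memory tree by overriding the hooks above. The files and dirs maps
+// used here are hypothetical placeholders, not part of this package:
+//
+//    ctxt := build.Default
+//    ctxt.IsDir = func(path string) bool {
+//        _, ok := dirs[path] // dirs: hypothetical map[string]bool of directories
+//        return ok
+//    }
+//    ctxt.OpenFile = func(path string) (io.ReadCloser, error) {
+//        data, ok := files[path] // files: hypothetical map[string][]byte of contents
+//        if !ok {
+//            return nil, fs.ErrNotExist
+//        }
+//        return io.NopCloser(bytes.NewReader(data)), nil
+//    }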
+
+// joinPath calls ctxt.JoinPath (if not nil) or else filepath.Join.
+func (ctxt *Context) joinPath(elem ...string) string {
+ if f := ctxt.JoinPath; f != nil {
+ return f(elem...)
+ }
+ return filepath.Join(elem...)
+}
+
+// splitPathList calls ctxt.SplitPathList (if not nil) or else filepath.SplitList.
+func (ctxt *Context) splitPathList(s string) []string {
+ if f := ctxt.SplitPathList; f != nil {
+ return f(s)
+ }
+ return filepath.SplitList(s)
+}
+
+// isAbsPath calls ctxt.IsAbsPath (if not nil) or else filepath.IsAbs.
+func (ctxt *Context) isAbsPath(path string) bool {
+ if f := ctxt.IsAbsPath; f != nil {
+ return f(path)
+ }
+ return filepath.IsAbs(path)
+}
+
+// isDir calls ctxt.IsDir (if not nil) or else uses os.Stat.
+func (ctxt *Context) isDir(path string) bool {
+ if f := ctxt.IsDir; f != nil {
+ return f(path)
+ }
+ fi, err := os.Stat(path)
+ return err == nil && fi.IsDir()
+}
+
+// hasSubdir calls ctxt.HasSubdir (if not nil) or else uses
+// the local file system to answer the question.
+func (ctxt *Context) hasSubdir(root, dir string) (rel string, ok bool) {
+ if f := ctxt.HasSubdir; f != nil {
+ return f(root, dir)
+ }
+
+ // Try using paths we received.
+ if rel, ok = hasSubdir(root, dir); ok {
+ return
+ }
+
+ // Try expanding symlinks and comparing
+ // expanded against unexpanded and
+ // expanded against expanded.
+ rootSym, _ := filepath.EvalSymlinks(root)
+ dirSym, _ := filepath.EvalSymlinks(dir)
+
+ if rel, ok = hasSubdir(rootSym, dir); ok {
+ return
+ }
+ if rel, ok = hasSubdir(root, dirSym); ok {
+ return
+ }
+ return hasSubdir(rootSym, dirSym)
+}
+
+// hasSubdir reports if dir is within root by performing lexical analysis only.
+func hasSubdir(root, dir string) (rel string, ok bool) {
+ const sep = string(filepath.Separator)
+ root = filepath.Clean(root)
+ if !strings.HasSuffix(root, sep) {
+ root += sep
+ }
+ dir = filepath.Clean(dir)
+ after, found := strings.CutPrefix(dir, root)
+ if !found {
+ return "", false
+ }
+ return filepath.ToSlash(after), true
+}
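+
+// For example, with Unix-style paths the lexical check behaves as follows
+// (the paths are illustrative only):
+//
+//    rel, ok := hasSubdir("/home/user/go/src", "/home/user/go/src/example.com/m")
+//    // rel == "example.com/m", ok == true
+//
+//    rel, ok = hasSubdir("/home/user/go/src", "/home/user/other")
+//    // rel == "", ok == false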
+
+// readDir calls ctxt.ReadDir (if not nil) or else os.ReadDir.
+func (ctxt *Context) readDir(path string) ([]fs.DirEntry, error) {
+ // TODO: add a fs.DirEntry version of Context.ReadDir
+ if f := ctxt.ReadDir; f != nil {
+ fis, err := f(path)
+ if err != nil {
+ return nil, err
+ }
+ des := make([]fs.DirEntry, len(fis))
+ for i, fi := range fis {
+ des[i] = fs.FileInfoToDirEntry(fi)
+ }
+ return des, nil
+ }
+ return os.ReadDir(path)
+}
+
+// openFile calls ctxt.OpenFile (if not nil) or else os.Open.
+func (ctxt *Context) openFile(path string) (io.ReadCloser, error) {
+ if fn := ctxt.OpenFile; fn != nil {
+ return fn(path)
+ }
+
+ f, err := os.Open(path)
+ if err != nil {
+ return nil, err // nil interface
+ }
+ return f, nil
+}
+
+// isFile determines whether path is a file by trying to open it.
+// It reuses openFile instead of adding another function to the
+// list in Context.
+func (ctxt *Context) isFile(path string) bool {
+ f, err := ctxt.openFile(path)
+ if err != nil {
+ return false
+ }
+ f.Close()
+ return true
+}
+
+// gopath returns the list of Go path directories.
+func (ctxt *Context) gopath() []string {
+ var all []string
+ for _, p := range ctxt.splitPathList(ctxt.GOPATH) {
+ if p == "" || p == ctxt.GOROOT {
+ // Empty paths are uninteresting.
+ // If the path is the GOROOT, ignore it.
+ // People sometimes set GOPATH=$GOROOT.
+ // Do not get confused by this common mistake.
+ continue
+ }
+ if strings.HasPrefix(p, "~") {
+ // Path segments starting with ~ on Unix are almost always
+ // users who have incorrectly quoted ~ while setting GOPATH,
+ // preventing it from expanding to $HOME.
+ // The situation is made more confusing by the fact that
+ // bash allows quoted ~ in $PATH (most shells do not).
+ // Do not get confused by this, and do not try to use the path.
+ // It does not exist, and printing errors about it confuses
+ // those users even more, because they think "sure ~ exists!".
+ // The go command diagnoses this situation and prints a
+ // useful error.
+ // On Windows, ~ is used in short names, such as c:\progra~1
+ // for c:\program files.
+ continue
+ }
+ all = append(all, p)
+ }
+ return all
+}
+
+// SrcDirs returns a list of package source root directories.
+// It draws from the current Go root and Go path but omits directories
+// that do not exist.
+func (ctxt *Context) SrcDirs() []string {
+ var all []string
+ if ctxt.GOROOT != "" && ctxt.Compiler != "gccgo" {
+ dir := ctxt.joinPath(ctxt.GOROOT, "src")
+ if ctxt.isDir(dir) {
+ all = append(all, dir)
+ }
+ }
+ for _, p := range ctxt.gopath() {
+ dir := ctxt.joinPath(p, "src")
+ if ctxt.isDir(dir) {
+ all = append(all, dir)
+ }
+ }
+ return all
+}
+
+// Default is the default Context for builds.
+// It uses the GOARCH, GOOS, GOROOT, and GOPATH environment variables
+// if set, or else the compiled code's GOARCH, GOOS, and GOROOT.
+var Default Context = defaultContext()
+
+func defaultGOPATH() string {
+ env := "HOME"
+ if runtime.GOOS == "windows" {
+ env = "USERPROFILE"
+ } else if runtime.GOOS == "plan9" {
+ env = "home"
+ }
+ if home := os.Getenv(env); home != "" {
+ def := filepath.Join(home, "go")
+ if filepath.Clean(def) == filepath.Clean(runtime.GOROOT()) {
+ // Don't set the default GOPATH to GOROOT,
+ // as that will trigger warnings from the go tool.
+ return ""
+ }
+ return def
+ }
+ return ""
+}
+
+var defaultToolTags, defaultReleaseTags []string
+
+func defaultContext() Context {
+ var c Context
+
+ c.GOARCH = buildcfg.GOARCH
+ c.GOOS = buildcfg.GOOS
+ if goroot := runtime.GOROOT(); goroot != "" {
+ c.GOROOT = filepath.Clean(goroot)
+ }
+ c.GOPATH = envOr("GOPATH", defaultGOPATH())
+ c.Compiler = runtime.Compiler
+ c.ToolTags = append(c.ToolTags, buildcfg.ToolTags...)
+
+ defaultToolTags = append([]string{}, c.ToolTags...) // our own private copy
+
+ // Each major Go release in the Go 1.x series adds a new
+ // "go1.x" release tag. That is, the go1.x tag is present in
+ // all releases >= Go 1.x. Code that requires Go 1.x or later
+ // should say "go:build go1.x", and code that should only be
+ // built before Go 1.x (perhaps it is the stub to use in that
+ // case) should say "go:build !go1.x".
+ // The last element in ReleaseTags is the current release.
+ for i := 1; i <= goversion.Version; i++ {
+ c.ReleaseTags = append(c.ReleaseTags, "go1."+strconv.Itoa(i))
+ }
+
+ defaultReleaseTags = append([]string{}, c.ReleaseTags...) // our own private copy
+
+ env := os.Getenv("CGO_ENABLED")
+ if env == "" {
+ env = defaultCGO_ENABLED
+ }
+ switch env {
+ case "1":
+ c.CgoEnabled = true
+ case "0":
+ c.CgoEnabled = false
+ default:
+ // cgo must be explicitly enabled for cross compilation builds
+ if runtime.GOARCH == c.GOARCH && runtime.GOOS == c.GOOS {
+ c.CgoEnabled = cgoEnabled[c.GOOS+"/"+c.GOARCH]
+ break
+ }
+ c.CgoEnabled = false
+ }
+
+ return c
+}
+
+func envOr(name, def string) string {
+ s := os.Getenv(name)
+ if s == "" {
+ return def
+ }
+ return s
+}
+
+// An ImportMode controls the behavior of the Import method.
+type ImportMode uint
+
+const (
+ // If FindOnly is set, Import stops after locating the directory
+ // that should contain the sources for a package. It does not
+ // read any files in the directory.
+ FindOnly ImportMode = 1 << iota
+
+ // If AllowBinary is set, Import can be satisfied by a compiled
+ // package object without corresponding sources.
+ //
+ // Deprecated:
+ // The supported way to create a compiled-only package is to
+ // write source code containing a //go:binary-only-package comment at
+ // the top of the file. Such a package will be recognized
+ // regardless of this flag setting (because it has source code)
+ // and will have BinaryOnly set to true in the returned Package.
+ AllowBinary
+
+ // If ImportComment is set, parse import comments on package statements.
+ // Import returns an error if it finds a comment it cannot understand
+ // or finds conflicting comments in multiple source files.
+ // See golang.org/s/go14customimport for more information.
+ ImportComment
+
+ // By default, Import searches vendor directories
+ // that apply in the given source directory before searching
+ // the GOROOT and GOPATH roots.
+ // If an Import finds and returns a package using a vendor
+ // directory, the resulting ImportPath is the complete path
+ // to the package, including the path elements leading up
+ // to and including "vendor".
+ // For example, if Import("y", "x/subdir", 0) finds
+ // "x/vendor/y", the returned package's ImportPath is "x/vendor/y",
+ // not plain "y".
+ // See golang.org/s/go15vendor for more information.
+ //
+ // Setting IgnoreVendor ignores vendor directories.
+ //
+ // In contrast to the package's ImportPath,
+ // the returned package's Imports, TestImports, and XTestImports
+ // are always the exact import paths from the source files:
+ // Import makes no attempt to resolve or check those paths.
+ IgnoreVendor
+)
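+
+// For illustration, a sketch of combining these flags to locate the directory
+// for a package without reading its files and without consulting vendor
+// directories (the import path shown is hypothetical):
+//
+//    p, err := build.Default.Import("example.com/m/sub", "", build.FindOnly|build.IgnoreVendor)
+//    if err != nil {
+//        // handle lookup failure; p still carries partial information
+//    }
+//    fmt.Println(p.Dir) // directory that would contain the package sources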
+
+// A Package describes the Go package found in a directory.
+type Package struct {
+ Dir string // directory containing package sources
+ Name string // package name
+ ImportComment string // path in import comment on package statement
+ Doc string // documentation synopsis
+ ImportPath string // import path of package ("" if unknown)
+ Root string // root of Go tree where this package lives
+ SrcRoot string // package source root directory ("" if unknown)
+ PkgRoot string // package install root directory ("" if unknown)
+ PkgTargetRoot string // architecture dependent install root directory ("" if unknown)
+ BinDir string // command install directory ("" if unknown)
+ Goroot bool // package found in Go root
+ PkgObj string // installed .a file
+ AllTags []string // tags that can influence file selection in this directory
+ ConflictDir string // this directory shadows Dir in $GOPATH
+ BinaryOnly bool // cannot be rebuilt from source (has //go:binary-only-package comment)
+
+ // Source files
+ GoFiles []string // .go source files (excluding CgoFiles, TestGoFiles, XTestGoFiles)
+ CgoFiles []string // .go source files that import "C"
+ IgnoredGoFiles []string // .go source files ignored for this build (including ignored _test.go files)
+ InvalidGoFiles []string // .go source files with detected problems (parse error, wrong package name, and so on)
+ IgnoredOtherFiles []string // non-.go source files ignored for this build
+ CFiles []string // .c source files
+ CXXFiles []string // .cc, .cpp and .cxx source files
+ MFiles []string // .m (Objective-C) source files
+ HFiles []string // .h, .hh, .hpp and .hxx source files
+ FFiles []string // .f, .F, .for and .f90 Fortran source files
+ SFiles []string // .s source files
+ SwigFiles []string // .swig files
+ SwigCXXFiles []string // .swigcxx files
+ SysoFiles []string // .syso system object files to add to archive
+
+ // Cgo directives
+ CgoCFLAGS []string // Cgo CFLAGS directives
+ CgoCPPFLAGS []string // Cgo CPPFLAGS directives
+ CgoCXXFLAGS []string // Cgo CXXFLAGS directives
+ CgoFFLAGS []string // Cgo FFLAGS directives
+ CgoLDFLAGS []string // Cgo LDFLAGS directives
+ CgoPkgConfig []string // Cgo pkg-config directives
+
+ // Test information
+ TestGoFiles []string // _test.go files in package
+ XTestGoFiles []string // _test.go files outside package
+
+ // Dependency information
+ Imports []string // import paths from GoFiles, CgoFiles
+ ImportPos map[string][]token.Position // line information for Imports
+ TestImports []string // import paths from TestGoFiles
+ TestImportPos map[string][]token.Position // line information for TestImports
+ XTestImports []string // import paths from XTestGoFiles
+ XTestImportPos map[string][]token.Position // line information for XTestImports
+
+ // //go:embed patterns found in Go source files
+ // For example, if a source file says
+ // //go:embed a* b.c
+ // then the list will contain those two strings as separate entries.
+ // (See package embed for more details about //go:embed.)
+ EmbedPatterns []string // patterns from GoFiles, CgoFiles
+ EmbedPatternPos map[string][]token.Position // line information for EmbedPatterns
+ TestEmbedPatterns []string // patterns from TestGoFiles
+ TestEmbedPatternPos map[string][]token.Position // line information for TestEmbedPatterns
+ XTestEmbedPatterns []string // patterns from XTestGoFiles
+ XTestEmbedPatternPos map[string][]token.Position // line information for XTestEmbedPatterns
+}
+
+// IsCommand reports whether the package is considered a
+// command to be installed (not just a library).
+// Packages named "main" are treated as commands.
+func (p *Package) IsCommand() bool {
+ return p.Name == "main"
+}
+
+// ImportDir is like Import but processes the Go package found in
+// the named directory.
+func (ctxt *Context) ImportDir(dir string, mode ImportMode) (*Package, error) {
+ return ctxt.Import(".", dir, mode)
+}
+
+// NoGoError is the error used by Import to describe a directory
+// containing no buildable Go source files. (It may still contain
+// test files, files hidden by build tags, and so on.)
+type NoGoError struct {
+ Dir string
+}
+
+func (e *NoGoError) Error() string {
+ return "no buildable Go source files in " + e.Dir
+}
+
+// MultiplePackageError describes a directory containing
+// multiple buildable Go source files for multiple packages.
+type MultiplePackageError struct {
+ Dir string // directory containing files
+ Packages []string // package names found
+ Files []string // corresponding files: Files[i] declares package Packages[i]
+}
+
+func (e *MultiplePackageError) Error() string {
+ // Error string limited to two entries for compatibility.
+ return fmt.Sprintf("found packages %s (%s) and %s (%s) in %s", e.Packages[0], e.Files[0], e.Packages[1], e.Files[1], e.Dir)
+}
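+
+// For illustration, a sketch of distinguishing these error types after an
+// ImportDir call (the directory shown is hypothetical):
+//
+//    p, err := build.ImportDir("./testdata/example", 0)
+//    var noGo *build.NoGoError
+//    var multi *build.MultiplePackageError
+//    switch {
+//    case errors.As(err, &noGo):
+//        // the directory exists but has no buildable Go files
+//    case errors.As(err, &multi):
+//        // the directory declares more than one package name
+//    case err != nil:
+//        // some other failure; p still holds partial information
+//    }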
+
+func nameExt(name string) string {
+ i := strings.LastIndex(name, ".")
+ if i < 0 {
+ return ""
+ }
+ return name[i:]
+}
+
+var installgoroot = godebug.New("installgoroot")
+
+// Import returns details about the Go package named by the import path,
+// interpreting local import paths relative to the srcDir directory.
+// If the path is a local import path naming a package that can be imported
+// using a standard import path, the returned package will set p.ImportPath
+// to that path.
+//
+// In the directory containing the package, .go, .c, .h, and .s files are
+// considered part of the package except for:
+//
+// - .go files in package documentation
+// - files starting with _ or . (likely editor temporary files)
+// - files with build constraints not satisfied by the context
+//
+// If an error occurs, Import returns a non-nil error and a non-nil
+// *Package containing partial information.
+func (ctxt *Context) Import(path string, srcDir string, mode ImportMode) (*Package, error) {
+ p := &Package{
+ ImportPath: path,
+ }
+ if path == "" {
+ return p, fmt.Errorf("import %q: invalid import path", path)
+ }
+
+ var pkgtargetroot string
+ var pkga string
+ var pkgerr error
+ suffix := ""
+ if ctxt.InstallSuffix != "" {
+ suffix = "_" + ctxt.InstallSuffix
+ }
+ switch ctxt.Compiler {
+ case "gccgo":
+ pkgtargetroot = "pkg/gccgo_" + ctxt.GOOS + "_" + ctxt.GOARCH + suffix
+ case "gc":
+ pkgtargetroot = "pkg/" + ctxt.GOOS + "_" + ctxt.GOARCH + suffix
+ default:
+ // Save error for end of function.
+ pkgerr = fmt.Errorf("import %q: unknown compiler %q", path, ctxt.Compiler)
+ }
+ setPkga := func() {
+ switch ctxt.Compiler {
+ case "gccgo":
+ dir, elem := pathpkg.Split(p.ImportPath)
+ pkga = pkgtargetroot + "/" + dir + "lib" + elem + ".a"
+ case "gc":
+ pkga = pkgtargetroot + "/" + p.ImportPath + ".a"
+ }
+ }
+ setPkga()
+
+ binaryOnly := false
+ if IsLocalImport(path) {
+ pkga = "" // local imports have no installed path
+ if srcDir == "" {
+ return p, fmt.Errorf("import %q: import relative to unknown directory", path)
+ }
+ if !ctxt.isAbsPath(path) {
+ p.Dir = ctxt.joinPath(srcDir, path)
+ }
+ // p.Dir directory may or may not exist. Gather partial information first, check if it exists later.
+ // Determine canonical import path, if any.
+ // Exclude results where the import path would include /testdata/.
+ inTestdata := func(sub string) bool {
+ return strings.Contains(sub, "/testdata/") || strings.HasSuffix(sub, "/testdata") || strings.HasPrefix(sub, "testdata/") || sub == "testdata"
+ }
+ if ctxt.GOROOT != "" {
+ root := ctxt.joinPath(ctxt.GOROOT, "src")
+ if sub, ok := ctxt.hasSubdir(root, p.Dir); ok && !inTestdata(sub) {
+ p.Goroot = true
+ p.ImportPath = sub
+ p.Root = ctxt.GOROOT
+ setPkga() // p.ImportPath changed
+ goto Found
+ }
+ }
+ all := ctxt.gopath()
+ for i, root := range all {
+ rootsrc := ctxt.joinPath(root, "src")
+ if sub, ok := ctxt.hasSubdir(rootsrc, p.Dir); ok && !inTestdata(sub) {
+ // We found a potential import path for dir,
+ // but check that using it wouldn't find something
+ // else first.
+ if ctxt.GOROOT != "" && ctxt.Compiler != "gccgo" {
+ if dir := ctxt.joinPath(ctxt.GOROOT, "src", sub); ctxt.isDir(dir) {
+ p.ConflictDir = dir
+ goto Found
+ }
+ }
+ for _, earlyRoot := range all[:i] {
+ if dir := ctxt.joinPath(earlyRoot, "src", sub); ctxt.isDir(dir) {
+ p.ConflictDir = dir
+ goto Found
+ }
+ }
+
+ // sub would not name some other directory instead of this one.
+ // Record it.
+ p.ImportPath = sub
+ p.Root = root
+ setPkga() // p.ImportPath changed
+ goto Found
+ }
+ }
+ // It's okay that we didn't find a root containing dir.
+ // Keep going with the information we have.
+ } else {
+ if strings.HasPrefix(path, "/") {
+ return p, fmt.Errorf("import %q: cannot import absolute path", path)
+ }
+
+ if err := ctxt.importGo(p, path, srcDir, mode); err == nil {
+ goto Found
+ } else if err != errNoModules {
+ return p, err
+ }
+
+ gopath := ctxt.gopath() // needed twice below; avoid computing many times
+
+ // tried records the location of unsuccessful package lookups
+ var tried struct {
+ vendor []string
+ goroot string
+ gopath []string
+ }
+
+ // Vendor directories get first chance to satisfy import.
+ if mode&IgnoreVendor == 0 && srcDir != "" {
+ searchVendor := func(root string, isGoroot bool) bool {
+ sub, ok := ctxt.hasSubdir(root, srcDir)
+ if !ok || !strings.HasPrefix(sub, "src/") || strings.Contains(sub, "/testdata/") {
+ return false
+ }
+ for {
+ vendor := ctxt.joinPath(root, sub, "vendor")
+ if ctxt.isDir(vendor) {
+ dir := ctxt.joinPath(vendor, path)
+ if ctxt.isDir(dir) && hasGoFiles(ctxt, dir) {
+ p.Dir = dir
+ p.ImportPath = strings.TrimPrefix(pathpkg.Join(sub, "vendor", path), "src/")
+ p.Goroot = isGoroot
+ p.Root = root
+ setPkga() // p.ImportPath changed
+ return true
+ }
+ tried.vendor = append(tried.vendor, dir)
+ }
+ i := strings.LastIndex(sub, "/")
+ if i < 0 {
+ break
+ }
+ sub = sub[:i]
+ }
+ return false
+ }
+ if ctxt.Compiler != "gccgo" && ctxt.GOROOT != "" && searchVendor(ctxt.GOROOT, true) {
+ goto Found
+ }
+ for _, root := range gopath {
+ if searchVendor(root, false) {
+ goto Found
+ }
+ }
+ }
+
+ // Determine directory from import path.
+ if ctxt.GOROOT != "" {
+ // If the package path starts with "vendor/", only search GOROOT before
+ // GOPATH if the importer is also within GOROOT. That way, if the user has
+ // vendored in a package that is subsequently included in the standard
+ // distribution, they'll continue to pick up their own vendored copy.
+ gorootFirst := srcDir == "" || !strings.HasPrefix(path, "vendor/")
+ if !gorootFirst {
+ _, gorootFirst = ctxt.hasSubdir(ctxt.GOROOT, srcDir)
+ }
+ if gorootFirst {
+ dir := ctxt.joinPath(ctxt.GOROOT, "src", path)
+ if ctxt.Compiler != "gccgo" {
+ isDir := ctxt.isDir(dir)
+ binaryOnly = !isDir && mode&AllowBinary != 0 && pkga != "" && ctxt.isFile(ctxt.joinPath(ctxt.GOROOT, pkga))
+ if isDir || binaryOnly {
+ p.Dir = dir
+ p.Goroot = true
+ p.Root = ctxt.GOROOT
+ goto Found
+ }
+ }
+ tried.goroot = dir
+ }
+ if ctxt.Compiler == "gccgo" && goroot.IsStandardPackage(ctxt.GOROOT, ctxt.Compiler, path) {
+ // TODO(bcmills): Setting p.Dir here is misleading, because gccgo
+ // doesn't actually load its standard-library packages from this
+ // directory. See if we can leave it unset.
+ p.Dir = ctxt.joinPath(ctxt.GOROOT, "src", path)
+ p.Goroot = true
+ p.Root = ctxt.GOROOT
+ goto Found
+ }
+ }
+ for _, root := range gopath {
+ dir := ctxt.joinPath(root, "src", path)
+ isDir := ctxt.isDir(dir)
+ binaryOnly = !isDir && mode&AllowBinary != 0 && pkga != "" && ctxt.isFile(ctxt.joinPath(root, pkga))
+ if isDir || binaryOnly {
+ p.Dir = dir
+ p.Root = root
+ goto Found
+ }
+ tried.gopath = append(tried.gopath, dir)
+ }
+
+ // If we tried GOPATH first due to a "vendor/" prefix, fall back to GOROOT.
+ // That way, the user can still get useful results from 'go list' for
+ // standard-vendored paths passed on the command line.
+ if ctxt.GOROOT != "" && tried.goroot == "" {
+ dir := ctxt.joinPath(ctxt.GOROOT, "src", path)
+ if ctxt.Compiler != "gccgo" {
+ isDir := ctxt.isDir(dir)
+ binaryOnly = !isDir && mode&AllowBinary != 0 && pkga != "" && ctxt.isFile(ctxt.joinPath(ctxt.GOROOT, pkga))
+ if isDir || binaryOnly {
+ p.Dir = dir
+ p.Goroot = true
+ p.Root = ctxt.GOROOT
+ goto Found
+ }
+ }
+ tried.goroot = dir
+ }
+
+ // package was not found
+ var paths []string
+ format := "\t%s (vendor tree)"
+ for _, dir := range tried.vendor {
+ paths = append(paths, fmt.Sprintf(format, dir))
+ format = "\t%s"
+ }
+ if tried.goroot != "" {
+ paths = append(paths, fmt.Sprintf("\t%s (from $GOROOT)", tried.goroot))
+ } else {
+ paths = append(paths, "\t($GOROOT not set)")
+ }
+ format = "\t%s (from $GOPATH)"
+ for _, dir := range tried.gopath {
+ paths = append(paths, fmt.Sprintf(format, dir))
+ format = "\t%s"
+ }
+ if len(tried.gopath) == 0 {
+ paths = append(paths, "\t($GOPATH not set. For more details see: 'go help gopath')")
+ }
+ return p, fmt.Errorf("cannot find package %q in any of:\n%s", path, strings.Join(paths, "\n"))
+ }
+
+Found:
+ if p.Root != "" {
+ p.SrcRoot = ctxt.joinPath(p.Root, "src")
+ p.PkgRoot = ctxt.joinPath(p.Root, "pkg")
+ p.BinDir = ctxt.joinPath(p.Root, "bin")
+ if pkga != "" {
+ // Always set PkgTargetRoot. It might be used when building in shared
+ // mode.
+ p.PkgTargetRoot = ctxt.joinPath(p.Root, pkgtargetroot)
+
+ // Set the install target if applicable.
+ if !p.Goroot || (installgoroot.Value() == "all" && p.ImportPath != "unsafe" && p.ImportPath != "builtin") {
+ p.PkgObj = ctxt.joinPath(p.Root, pkga)
+ }
+ }
+ }
+
+ // If it's a local import path, by the time we get here, we still haven't checked
+ // that p.Dir directory exists. This is the right time to do that check.
+ // We can't do it earlier, because we want to gather partial information for the
+ // non-nil *Package returned when an error occurs.
+ // We need to do this before we return early on FindOnly flag.
+ if IsLocalImport(path) && !ctxt.isDir(p.Dir) {
+ if ctxt.Compiler == "gccgo" && p.Goroot {
+ // gccgo has no sources for GOROOT packages.
+ return p, nil
+ }
+
+ // package was not found
+ return p, fmt.Errorf("cannot find package %q in:\n\t%s", p.ImportPath, p.Dir)
+ }
+
+ if mode&FindOnly != 0 {
+ return p, pkgerr
+ }
+ if binaryOnly && (mode&AllowBinary) != 0 {
+ return p, pkgerr
+ }
+
+ if ctxt.Compiler == "gccgo" && p.Goroot {
+ // gccgo has no sources for GOROOT packages.
+ return p, nil
+ }
+
+ dirs, err := ctxt.readDir(p.Dir)
+ if err != nil {
+ return p, err
+ }
+
+ var badGoError error
+ badGoFiles := make(map[string]bool)
+ badGoFile := func(name string, err error) {
+ if badGoError == nil {
+ badGoError = err
+ }
+ if !badGoFiles[name] {
+ p.InvalidGoFiles = append(p.InvalidGoFiles, name)
+ badGoFiles[name] = true
+ }
+ }
+
+ var Sfiles []string // files with ".S" (capital S) or ".sx" (the ".S" equivalent for case-insensitive filesystems)
+ var firstFile, firstCommentFile string
+ embedPos := make(map[string][]token.Position)
+ testEmbedPos := make(map[string][]token.Position)
+ xTestEmbedPos := make(map[string][]token.Position)
+ importPos := make(map[string][]token.Position)
+ testImportPos := make(map[string][]token.Position)
+ xTestImportPos := make(map[string][]token.Position)
+ allTags := make(map[string]bool)
+ fset := token.NewFileSet()
+ for _, d := range dirs {
+ if d.IsDir() {
+ continue
+ }
+ if d.Type() == fs.ModeSymlink {
+ if ctxt.isDir(ctxt.joinPath(p.Dir, d.Name())) {
+ // Symlinks to directories are not source files.
+ continue
+ }
+ }
+
+ name := d.Name()
+ ext := nameExt(name)
+
+ info, err := ctxt.matchFile(p.Dir, name, allTags, &p.BinaryOnly, fset)
+ if err != nil && strings.HasSuffix(name, ".go") {
+ badGoFile(name, err)
+ continue
+ }
+ if info == nil {
+ if strings.HasPrefix(name, "_") || strings.HasPrefix(name, ".") {
+ // not due to build constraints - don't report
+ } else if ext == ".go" {
+ p.IgnoredGoFiles = append(p.IgnoredGoFiles, name)
+ } else if fileListForExt(p, ext) != nil {
+ p.IgnoredOtherFiles = append(p.IgnoredOtherFiles, name)
+ }
+ continue
+ }
+
+ // Going to save the file. For non-Go files, can stop here.
+ switch ext {
+ case ".go":
+ // keep going
+ case ".S", ".sx":
+ // special case for cgo, handled at end
+ Sfiles = append(Sfiles, name)
+ continue
+ default:
+ if list := fileListForExt(p, ext); list != nil {
+ *list = append(*list, name)
+ }
+ continue
+ }
+
+ data, filename := info.header, info.name
+
+ if info.parseErr != nil {
+ badGoFile(name, info.parseErr)
+ // Fall through: we might still have a partial AST in info.parsed,
+ // and we want to list files with parse errors anyway.
+ }
+
+ var pkg string
+ if info.parsed != nil {
+ pkg = info.parsed.Name.Name
+ if pkg == "documentation" {
+ p.IgnoredGoFiles = append(p.IgnoredGoFiles, name)
+ continue
+ }
+ }
+
+ isTest := strings.HasSuffix(name, "_test.go")
+ isXTest := false
+ if isTest && strings.HasSuffix(pkg, "_test") && p.Name != pkg {
+ isXTest = true
+ pkg = pkg[:len(pkg)-len("_test")]
+ }
+
+ if p.Name == "" {
+ p.Name = pkg
+ firstFile = name
+ } else if pkg != p.Name {
+ // TODO(#45999): The choice of p.Name is arbitrary based on file iteration
+ // order. Instead of resolving p.Name arbitrarily, we should clear out the
+ // existing name and mark the existing files as also invalid.
+ badGoFile(name, &MultiplePackageError{
+ Dir: p.Dir,
+ Packages: []string{p.Name, pkg},
+ Files: []string{firstFile, name},
+ })
+ }
+ // Grab the first package comment as docs, provided it is not from a test file.
+ if info.parsed != nil && info.parsed.Doc != nil && p.Doc == "" && !isTest && !isXTest {
+ p.Doc = doc.Synopsis(info.parsed.Doc.Text())
+ }
+
+ if mode&ImportComment != 0 {
+ qcom, line := findImportComment(data)
+ if line != 0 {
+ com, err := strconv.Unquote(qcom)
+ if err != nil {
+ badGoFile(name, fmt.Errorf("%s:%d: cannot parse import comment", filename, line))
+ } else if p.ImportComment == "" {
+ p.ImportComment = com
+ firstCommentFile = name
+ } else if p.ImportComment != com {
+ badGoFile(name, fmt.Errorf("found import comments %q (%s) and %q (%s) in %s", p.ImportComment, firstCommentFile, com, name, p.Dir))
+ }
+ }
+ }
+
+ // Record imports and information about cgo.
+ isCgo := false
+ for _, imp := range info.imports {
+ if imp.path == "C" {
+ if isTest {
+ badGoFile(name, fmt.Errorf("use of cgo in test %s not supported", filename))
+ continue
+ }
+ isCgo = true
+ if imp.doc != nil {
+ if err := ctxt.saveCgo(filename, p, imp.doc); err != nil {
+ badGoFile(name, err)
+ }
+ }
+ }
+ }
+
+ var fileList *[]string
+ var importMap, embedMap map[string][]token.Position
+ switch {
+ case isCgo:
+ allTags["cgo"] = true
+ if ctxt.CgoEnabled {
+ fileList = &p.CgoFiles
+ importMap = importPos
+ embedMap = embedPos
+ } else {
+ // Ignore imports and embeds from cgo files if cgo is disabled.
+ fileList = &p.IgnoredGoFiles
+ }
+ case isXTest:
+ fileList = &p.XTestGoFiles
+ importMap = xTestImportPos
+ embedMap = xTestEmbedPos
+ case isTest:
+ fileList = &p.TestGoFiles
+ importMap = testImportPos
+ embedMap = testEmbedPos
+ default:
+ fileList = &p.GoFiles
+ importMap = importPos
+ embedMap = embedPos
+ }
+ *fileList = append(*fileList, name)
+ if importMap != nil {
+ for _, imp := range info.imports {
+ importMap[imp.path] = append(importMap[imp.path], fset.Position(imp.pos))
+ }
+ }
+ if embedMap != nil {
+ for _, emb := range info.embeds {
+ embedMap[emb.pattern] = append(embedMap[emb.pattern], emb.pos)
+ }
+ }
+ }
+
+ for tag := range allTags {
+ p.AllTags = append(p.AllTags, tag)
+ }
+ sort.Strings(p.AllTags)
+
+ p.EmbedPatterns, p.EmbedPatternPos = cleanDecls(embedPos)
+ p.TestEmbedPatterns, p.TestEmbedPatternPos = cleanDecls(testEmbedPos)
+ p.XTestEmbedPatterns, p.XTestEmbedPatternPos = cleanDecls(xTestEmbedPos)
+
+ p.Imports, p.ImportPos = cleanDecls(importPos)
+ p.TestImports, p.TestImportPos = cleanDecls(testImportPos)
+ p.XTestImports, p.XTestImportPos = cleanDecls(xTestImportPos)
+
+ // add the .S/.sx files only if we are using cgo
+ // (which means gcc will compile them).
+ // The standard assemblers expect .s files.
+ if len(p.CgoFiles) > 0 {
+ p.SFiles = append(p.SFiles, Sfiles...)
+ sort.Strings(p.SFiles)
+ } else {
+ p.IgnoredOtherFiles = append(p.IgnoredOtherFiles, Sfiles...)
+ sort.Strings(p.IgnoredOtherFiles)
+ }
+
+ if badGoError != nil {
+ return p, badGoError
+ }
+ if len(p.GoFiles)+len(p.CgoFiles)+len(p.TestGoFiles)+len(p.XTestGoFiles) == 0 {
+ return p, &NoGoError{p.Dir}
+ }
+ return p, pkgerr
+}
+
+func fileListForExt(p *Package, ext string) *[]string {
+ switch ext {
+ case ".c":
+ return &p.CFiles
+ case ".cc", ".cpp", ".cxx":
+ return &p.CXXFiles
+ case ".m":
+ return &p.MFiles
+ case ".h", ".hh", ".hpp", ".hxx":
+ return &p.HFiles
+ case ".f", ".F", ".for", ".f90":
+ return &p.FFiles
+ case ".s", ".S", ".sx":
+ return &p.SFiles
+ case ".swig":
+ return &p.SwigFiles
+ case ".swigcxx":
+ return &p.SwigCXXFiles
+ case ".syso":
+ return &p.SysoFiles
+ }
+ return nil
+}
+
+func uniq(list []string) []string {
+ if list == nil {
+ return nil
+ }
+ out := make([]string, len(list))
+ copy(out, list)
+ sort.Strings(out)
+ uniq := out[:0]
+ for _, x := range out {
+ if len(uniq) == 0 || uniq[len(uniq)-1] != x {
+ uniq = append(uniq, x)
+ }
+ }
+ return uniq
+}
+
+var errNoModules = errors.New("not using modules")
+
+// importGo checks whether it can use the go command to find the directory for path.
+// If using the go command is not appropriate, importGo returns errNoModules.
+// Otherwise, importGo tries using the go command and reports whether that succeeded.
+// Using the go command lets build.Import and build.Context.Import find code
+// in Go modules. In the long term we want tools to use go/packages (currently golang.org/x/tools/go/packages),
+// which will also use the go command.
+// Invoking the go command here is not very efficient in that it computes information
+// about the requested package and all dependencies and then only reports about the requested package.
+// Then we reinvoke it for every dependency. But this is still better than not working at all.
+// See golang.org/issue/26504.
+func (ctxt *Context) importGo(p *Package, path, srcDir string, mode ImportMode) error {
+ // To invoke the go command,
+ // we must not be doing special things like AllowBinary or IgnoreVendor,
+ // and all the file system callbacks must be nil (we're meant to use the local file system).
+ if mode&AllowBinary != 0 || mode&IgnoreVendor != 0 ||
+ ctxt.JoinPath != nil || ctxt.SplitPathList != nil || ctxt.IsAbsPath != nil || ctxt.IsDir != nil || ctxt.HasSubdir != nil || ctxt.ReadDir != nil || ctxt.OpenFile != nil || !equal(ctxt.ToolTags, defaultToolTags) || !equal(ctxt.ReleaseTags, defaultReleaseTags) {
+ return errNoModules
+ }
+
+ // If ctxt.GOROOT is not set, we don't know which go command to invoke,
+ // and even if we did we might return packages in GOROOT that we wouldn't otherwise find
+ // (because we don't know to search in 'go env GOROOT' otherwise).
+ if ctxt.GOROOT == "" {
+ return errNoModules
+ }
+
+ // Predict whether module aware mode is enabled by checking the value of
+ // GO111MODULE and looking for a go.mod file in the source directory or
+ // one of its parents. Running 'go env GOMOD' in the source directory would
+ // give a canonical answer, but we'd prefer not to execute another command.
+ go111Module := os.Getenv("GO111MODULE")
+ switch go111Module {
+ case "off":
+ return errNoModules
+ default: // "", "on", "auto", anything else
+ // Maybe use modules.
+ }
+
+ if srcDir != "" {
+ var absSrcDir string
+ if filepath.IsAbs(srcDir) {
+ absSrcDir = srcDir
+ } else if ctxt.Dir != "" {
+ return fmt.Errorf("go/build: Dir is non-empty, so relative srcDir is not allowed: %v", srcDir)
+ } else {
+ // Find the absolute source directory. hasSubdir does not handle
+ // relative paths (and can't because the callbacks don't support this).
+ var err error
+ absSrcDir, err = filepath.Abs(srcDir)
+ if err != nil {
+ return errNoModules
+ }
+ }
+
+ // If the source directory is in GOROOT, then the in-process code works fine
+ // and we should keep using it. Moreover, the 'go list' approach below doesn't
+ // take standard-library vendoring into account and will fail.
+ if _, ok := ctxt.hasSubdir(filepath.Join(ctxt.GOROOT, "src"), absSrcDir); ok {
+ return errNoModules
+ }
+ }
+
+ // For efficiency, if path is a standard library package, let the usual lookup code handle it.
+ if dir := ctxt.joinPath(ctxt.GOROOT, "src", path); ctxt.isDir(dir) {
+ return errNoModules
+ }
+
+ // If GO111MODULE=auto, look to see if there is a go.mod.
+ // Since go1.13, it doesn't matter if we're inside GOPATH.
+ if go111Module == "auto" {
+ var (
+ parent string
+ err error
+ )
+ if ctxt.Dir == "" {
+ parent, err = os.Getwd()
+ if err != nil {
+ // A nonexistent working directory can't be in a module.
+ return errNoModules
+ }
+ } else {
+ parent, err = filepath.Abs(ctxt.Dir)
+ if err != nil {
+ // If the caller passed a bogus Dir explicitly, that's materially
+ // different from not having modules enabled.
+ return err
+ }
+ }
+ for {
+ if f, err := ctxt.openFile(ctxt.joinPath(parent, "go.mod")); err == nil {
+ buf := make([]byte, 100)
+ _, err := f.Read(buf)
+ f.Close()
+ if err == nil || err == io.EOF {
+ // go.mod exists and is readable (is a file, not a directory).
+ break
+ }
+ }
+ d := filepath.Dir(parent)
+ if len(d) >= len(parent) {
+ return errNoModules // reached top of file system, no go.mod
+ }
+ parent = d
+ }
+ }
+
+ goCmd := filepath.Join(ctxt.GOROOT, "bin", "go")
+ cmd := exec.Command(goCmd, "list", "-e", "-compiler="+ctxt.Compiler, "-tags="+strings.Join(ctxt.BuildTags, ","), "-installsuffix="+ctxt.InstallSuffix, "-f={{.Dir}}\n{{.ImportPath}}\n{{.Root}}\n{{.Goroot}}\n{{if .Error}}{{.Error}}{{end}}\n", "--", path)
+
+ if ctxt.Dir != "" {
+ cmd.Dir = ctxt.Dir
+ }
+
+ var stdout, stderr strings.Builder
+ cmd.Stdout = &stdout
+ cmd.Stderr = &stderr
+
+ cgo := "0"
+ if ctxt.CgoEnabled {
+ cgo = "1"
+ }
+ cmd.Env = append(cmd.Environ(),
+ "GOOS="+ctxt.GOOS,
+ "GOARCH="+ctxt.GOARCH,
+ "GOROOT="+ctxt.GOROOT,
+ "GOPATH="+ctxt.GOPATH,
+ "CGO_ENABLED="+cgo,
+ )
+
+ if err := cmd.Run(); err != nil {
+ return fmt.Errorf("go/build: go list %s: %v\n%s\n", path, err, stderr.String())
+ }
+
+ f := strings.SplitN(stdout.String(), "\n", 5)
+ if len(f) != 5 {
+ return fmt.Errorf("go/build: importGo %s: unexpected output:\n%s\n", path, stdout.String())
+ }
+ dir := f[0]
+ errStr := strings.TrimSpace(f[4])
+ if errStr != "" && dir == "" {
+ // If 'go list' could not locate the package (dir is empty),
+ // return the same error that 'go list' reported.
+ return errors.New(errStr)
+ }
+
+ // If 'go list' did locate the package, ignore the error.
+ // It was probably related to loading source files, and we'll
+ // encounter it ourselves shortly if the FindOnly flag isn't set.
+ p.Dir = dir
+ p.ImportPath = f[1]
+ p.Root = f[2]
+ p.Goroot = f[3] == "true"
+ return nil
+}
+
+func equal(x, y []string) bool {
+ if len(x) != len(y) {
+ return false
+ }
+ for i, xi := range x {
+ if xi != y[i] {
+ return false
+ }
+ }
+ return true
+}
+
+// hasGoFiles reports whether dir contains any files with names ending in .go.
+// For a vendor check we must exclude directories that contain no .go files.
+// Otherwise it is not possible to vendor just a/b/c and still import the
+// non-vendored a/b. See golang.org/issue/13832.
+func hasGoFiles(ctxt *Context, dir string) bool {
+ ents, _ := ctxt.readDir(dir)
+ for _, ent := range ents {
+ if !ent.IsDir() && strings.HasSuffix(ent.Name(), ".go") {
+ return true
+ }
+ }
+ return false
+}
+
+func findImportComment(data []byte) (s string, line int) {
+ // expect keyword package
+ word, data := parseWord(data)
+ if string(word) != "package" {
+ return "", 0
+ }
+
+ // expect package name
+ _, data = parseWord(data)
+
+ // now ready for import comment, a // or /* */ comment
+ // beginning and ending on the current line.
+ for len(data) > 0 && (data[0] == ' ' || data[0] == '\t' || data[0] == '\r') {
+ data = data[1:]
+ }
+
+ var comment []byte
+ switch {
+ case bytes.HasPrefix(data, slashSlash):
+ comment, _, _ = bytes.Cut(data[2:], newline)
+ case bytes.HasPrefix(data, slashStar):
+ var ok bool
+ comment, _, ok = bytes.Cut(data[2:], starSlash)
+ if !ok {
+ // malformed comment
+ return "", 0
+ }
+ if bytes.Contains(comment, newline) {
+ return "", 0
+ }
+ }
+ comment = bytes.TrimSpace(comment)
+
+ // split comment into `import`, `"pkg"`
+ word, arg := parseWord(comment)
+ if string(word) != "import" {
+ return "", 0
+ }
+
+ line = 1 + bytes.Count(data[:cap(data)-cap(arg)], newline)
+ return strings.TrimSpace(string(arg)), line
+}
+
+var (
+ slashSlash = []byte("//")
+ slashStar = []byte("/*")
+ starSlash = []byte("*/")
+ newline = []byte("\n")
+)
+
+// skipSpaceOrComment returns data with any leading spaces or comments removed.
+func skipSpaceOrComment(data []byte) []byte {
+ for len(data) > 0 {
+ switch data[0] {
+ case ' ', '\t', '\r', '\n':
+ data = data[1:]
+ continue
+ case '/':
+ if bytes.HasPrefix(data, slashSlash) {
+ i := bytes.Index(data, newline)
+ if i < 0 {
+ return nil
+ }
+ data = data[i+1:]
+ continue
+ }
+ if bytes.HasPrefix(data, slashStar) {
+ data = data[2:]
+ i := bytes.Index(data, starSlash)
+ if i < 0 {
+ return nil
+ }
+ data = data[i+2:]
+ continue
+ }
+ }
+ break
+ }
+ return data
+}
+
+// parseWord skips any leading spaces or comments in data
+// and then parses the beginning of data as an identifier or keyword,
+// returning that word and what remains after the word.
+func parseWord(data []byte) (word, rest []byte) {
+ data = skipSpaceOrComment(data)
+
+ // Parse past leading word characters.
+ rest = data
+ for {
+ r, size := utf8.DecodeRune(rest)
+ if unicode.IsLetter(r) || '0' <= r && r <= '9' || r == '_' {
+ rest = rest[size:]
+ continue
+ }
+ break
+ }
+
+ word = data[:len(data)-len(rest)]
+ if len(word) == 0 {
+ return nil, nil
+ }
+
+ return word, rest
+}
+
+// MatchFile reports whether the file with the given name in the given directory
+// matches the context and would be included in a Package created by ImportDir
+// of that directory.
+//
+// MatchFile considers the name of the file and may use ctxt.OpenFile to
+// read some or all of the file's content.
+func (ctxt *Context) MatchFile(dir, name string) (match bool, err error) {
+ info, err := ctxt.matchFile(dir, name, nil, nil, nil)
+ return info != nil, err
+}
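+
+// For illustration, a sketch of checking a single file against the default
+// context (the directory and file name are hypothetical):
+//
+//    ok, err := build.Default.MatchFile("/path/to/pkg", "conn_linux.go")
+//    if err != nil {
+//        // the file could not be read or its header could not be parsed
+//    }
+//    // ok reports whether conn_linux.go matches GOOS, GOARCH, and the build tags.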
+
+var dummyPkg Package
+
+// fileInfo records information learned about a file included in a build.
+type fileInfo struct {
+ name string // full name including dir
+ header []byte
+ fset *token.FileSet
+ parsed *ast.File
+ parseErr error
+ imports []fileImport
+ embeds []fileEmbed
+}
+
+type fileImport struct {
+ path string
+ pos token.Pos
+ doc *ast.CommentGroup
+}
+
+type fileEmbed struct {
+ pattern string
+ pos token.Position
+}
+
+// matchFile determines whether the file with the given name in the given directory
+// should be included in the package being constructed.
+// If the file should be included, matchFile returns a non-nil *fileInfo (and a nil error).
+// Non-nil errors are reserved for unexpected problems.
+//
+// If name denotes a Go program, matchFile reads until the end of the
+// imports and returns that section of the file in the fileInfo's header field,
+// even though it only considers text before the first non-comment
+// when looking for go:build lines.
+//
+// If allTags is non-nil, matchFile records any encountered build tag
+// by setting allTags[tag] = true.
+func (ctxt *Context) matchFile(dir, name string, allTags map[string]bool, binaryOnly *bool, fset *token.FileSet) (*fileInfo, error) {
+ if strings.HasPrefix(name, "_") ||
+ strings.HasPrefix(name, ".") {
+ return nil, nil
+ }
+
+ i := strings.LastIndex(name, ".")
+ if i < 0 {
+ i = len(name)
+ }
+ ext := name[i:]
+
+ if ext != ".go" && fileListForExt(&dummyPkg, ext) == nil {
+ // skip
+ return nil, nil
+ }
+
+ if !ctxt.goodOSArchFile(name, allTags) && !ctxt.UseAllFiles {
+ return nil, nil
+ }
+
+ info := &fileInfo{name: ctxt.joinPath(dir, name), fset: fset}
+ if ext == ".syso" {
+ // binary, no reading
+ return info, nil
+ }
+
+ f, err := ctxt.openFile(info.name)
+ if err != nil {
+ return nil, err
+ }
+
+ if strings.HasSuffix(name, ".go") {
+ err = readGoInfo(f, info)
+ if strings.HasSuffix(name, "_test.go") {
+ binaryOnly = nil // ignore //go:binary-only-package comments in _test.go files
+ }
+ } else {
+ binaryOnly = nil // ignore //go:binary-only-package comments in non-Go sources
+ info.header, err = readComments(f)
+ }
+ f.Close()
+ if err != nil {
+ return info, fmt.Errorf("read %s: %v", info.name, err)
+ }
+
+ // Look for go:build comments to accept or reject the file.
+ ok, sawBinaryOnly, err := ctxt.shouldBuild(info.header, allTags)
+ if err != nil {
+ return nil, fmt.Errorf("%s: %v", name, err)
+ }
+ if !ok && !ctxt.UseAllFiles {
+ return nil, nil
+ }
+
+ if binaryOnly != nil && sawBinaryOnly {
+ *binaryOnly = true
+ }
+
+ return info, nil
+}
+
+func cleanDecls(m map[string][]token.Position) ([]string, map[string][]token.Position) {
+ all := make([]string, 0, len(m))
+ for path := range m {
+ all = append(all, path)
+ }
+ sort.Strings(all)
+ return all, m
+}
+
+// Import is shorthand for Default.Import.
+func Import(path, srcDir string, mode ImportMode) (*Package, error) {
+ return Default.Import(path, srcDir, mode)
+}
+
+// ImportDir is shorthand for Default.ImportDir.
+func ImportDir(dir string, mode ImportMode) (*Package, error) {
+ return Default.ImportDir(dir, mode)
+}
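+
+// For illustration, the package-level shorthands behave exactly like calls on
+// Default; for example, to find where a standard-library package lives:
+//
+//    p, err := build.Import("fmt", "", build.FindOnly)
+//    if err == nil {
+//        fmt.Println(p.Dir) // the fmt package directory under GOROOT/src
+//    }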
+
+var (
+ plusBuild = []byte("+build")
+
+ goBuildComment = []byte("//go:build")
+
+ errMultipleGoBuild = errors.New("multiple //go:build comments")
+)
+
+func isGoBuildComment(line []byte) bool {
+ if !bytes.HasPrefix(line, goBuildComment) {
+ return false
+ }
+ line = bytes.TrimSpace(line)
+ rest := line[len(goBuildComment):]
+ return len(rest) == 0 || len(bytes.TrimSpace(rest)) < len(rest)
+}
+
+// Special comment denoting a binary-only package.
+// See https://golang.org/design/2775-binary-only-packages
+// for more about the design of binary-only packages.
+var binaryOnlyComment = []byte("//go:binary-only-package")
+
+// shouldBuild reports whether it is okay to use this file.
+// The rule is that in the file's leading run of // comments
+// and blank lines, which must be followed by a blank line
+// (to avoid including a Go package clause doc comment),
+// lines beginning with '//go:build' are taken as build directives.
+//
+// The file is accepted only if each such line lists something
+// matching the file. For example:
+//
+// //go:build windows || linux
+//
+// marks the file as applicable only on Windows and Linux.
+//
+// For each build tag it consults, shouldBuild sets allTags[tag] = true.
+//
+// shouldBuild reports whether the file should be built
+// and whether a //go:binary-only-package comment was found.
+func (ctxt *Context) shouldBuild(content []byte, allTags map[string]bool) (shouldBuild, binaryOnly bool, err error) {
+ // Identify leading run of // comments and blank lines,
+ // which must be followed by a blank line.
+ // Also identify any //go:build comments.
+ content, goBuild, sawBinaryOnly, err := parseFileHeader(content)
+ if err != nil {
+ return false, false, err
+ }
+
+ // If //go:build line is present, it controls.
+ // Otherwise fall back to +build processing.
+ switch {
+ case goBuild != nil:
+ x, err := constraint.Parse(string(goBuild))
+ if err != nil {
+ return false, false, fmt.Errorf("parsing //go:build line: %v", err)
+ }
+ shouldBuild = ctxt.eval(x, allTags)
+
+ default:
+ shouldBuild = true
+ p := content
+ for len(p) > 0 {
+ line := p
+ if i := bytes.IndexByte(line, '\n'); i >= 0 {
+ line, p = line[:i], p[i+1:]
+ } else {
+ p = p[len(p):]
+ }
+ line = bytes.TrimSpace(line)
+ if !bytes.HasPrefix(line, slashSlash) || !bytes.Contains(line, plusBuild) {
+ continue
+ }
+ text := string(line)
+ if !constraint.IsPlusBuild(text) {
+ continue
+ }
+ if x, err := constraint.Parse(text); err == nil {
+ if !ctxt.eval(x, allTags) {
+ shouldBuild = false
+ }
+ }
+ }
+ }
+
+ return shouldBuild, sawBinaryOnly, nil
+}
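+
+// For illustration, the exported go/build/constraint package can be used to
+// evaluate a //go:build expression directly, which is essentially what
+// shouldBuild does against the Context's tags (the tag set below is hypothetical):
+//
+//    expr, err := constraint.Parse("//go:build linux && (amd64 || arm64)")
+//    if err != nil {
+//        // malformed build expression
+//    }
+//    match := expr.Eval(func(tag string) bool {
+//        return tag == "linux" || tag == "arm64"
+//    })
+//    // match is true for a linux/arm64 configuration.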
+
+func parseFileHeader(content []byte) (trimmed, goBuild []byte, sawBinaryOnly bool, err error) {
+ end := 0
+ p := content
+ ended := false // found non-blank, non-// line, so stopped accepting //go:build lines
+ inSlashStar := false // in /* */ comment
+
+Lines:
+ for len(p) > 0 {
+ line := p
+ if i := bytes.IndexByte(line, '\n'); i >= 0 {
+ line, p = line[:i], p[i+1:]
+ } else {
+ p = p[len(p):]
+ }
+ line = bytes.TrimSpace(line)
+ if len(line) == 0 && !ended { // Blank line
+ // Remember position of most recent blank line.
+ // When we find the first non-blank, non-// line,
+ // this "end" position marks the latest file position
+ // where a //go:build line can appear.
+ // (It must appear _before_ a blank line before the non-blank, non-// line.
+ // Yes, that's confusing, which is part of why we moved to //go:build lines.)
+ // Note that ended==false here means that inSlashStar==false,
+ // since seeing a /* would have set ended==true.
+ end = len(content) - len(p)
+ continue Lines
+ }
+ if !bytes.HasPrefix(line, slashSlash) { // Not comment line
+ ended = true
+ }
+
+ if !inSlashStar && isGoBuildComment(line) {
+ if goBuild != nil {
+ return nil, nil, false, errMultipleGoBuild
+ }
+ goBuild = line
+ }
+ if !inSlashStar && bytes.Equal(line, binaryOnlyComment) {
+ sawBinaryOnly = true
+ }
+
+ Comments:
+ for len(line) > 0 {
+ if inSlashStar {
+ if i := bytes.Index(line, starSlash); i >= 0 {
+ inSlashStar = false
+ line = bytes.TrimSpace(line[i+len(starSlash):])
+ continue Comments
+ }
+ continue Lines
+ }
+ if bytes.HasPrefix(line, slashSlash) {
+ continue Lines
+ }
+ if bytes.HasPrefix(line, slashStar) {
+ inSlashStar = true
+ line = bytes.TrimSpace(line[len(slashStar):])
+ continue Comments
+ }
+ // Found non-comment text.
+ break Lines
+ }
+ }
+
+ return content[:end], goBuild, sawBinaryOnly, nil
+}
+
+// saveCgo saves the information from the #cgo lines in the import "C" comment.
+// These lines set CFLAGS, CPPFLAGS, CXXFLAGS and LDFLAGS and pkg-config directives
+// that affect the way cgo's C code is built.
+func (ctxt *Context) saveCgo(filename string, di *Package, cg *ast.CommentGroup) error {
+ text := cg.Text()
+ for _, line := range strings.Split(text, "\n") {
+ orig := line
+
+ // Line is
+ // #cgo [GOOS/GOARCH...] LDFLAGS: stuff
+ //
+ line = strings.TrimSpace(line)
+ if len(line) < 5 || line[:4] != "#cgo" || (line[4] != ' ' && line[4] != '\t') {
+ continue
+ }
+
+ // Split at colon.
+ line, argstr, ok := strings.Cut(strings.TrimSpace(line[4:]), ":")
+ if !ok {
+ return fmt.Errorf("%s: invalid #cgo line: %s", filename, orig)
+ }
+
+ // Parse GOOS/GOARCH stuff.
+ f := strings.Fields(line)
+ if len(f) < 1 {
+ return fmt.Errorf("%s: invalid #cgo line: %s", filename, orig)
+ }
+
+ cond, verb := f[:len(f)-1], f[len(f)-1]
+ if len(cond) > 0 {
+ ok := false
+ for _, c := range cond {
+ if ctxt.matchAuto(c, nil) {
+ ok = true
+ break
+ }
+ }
+ if !ok {
+ continue
+ }
+ }
+
+ args, err := splitQuoted(argstr)
+ if err != nil {
+ return fmt.Errorf("%s: invalid #cgo line: %s", filename, orig)
+ }
+ for i, arg := range args {
+ if arg, ok = expandSrcDir(arg, di.Dir); !ok {
+ return fmt.Errorf("%s: malformed #cgo argument: %s", filename, arg)
+ }
+ args[i] = arg
+ }
+
+ switch verb {
+ case "CFLAGS", "CPPFLAGS", "CXXFLAGS", "FFLAGS", "LDFLAGS":
+ // Change relative paths to absolute.
+ ctxt.makePathsAbsolute(args, di.Dir)
+ }
+
+ switch verb {
+ case "CFLAGS":
+ di.CgoCFLAGS = append(di.CgoCFLAGS, args...)
+ case "CPPFLAGS":
+ di.CgoCPPFLAGS = append(di.CgoCPPFLAGS, args...)
+ case "CXXFLAGS":
+ di.CgoCXXFLAGS = append(di.CgoCXXFLAGS, args...)
+ case "FFLAGS":
+ di.CgoFFLAGS = append(di.CgoFFLAGS, args...)
+ case "LDFLAGS":
+ di.CgoLDFLAGS = append(di.CgoLDFLAGS, args...)
+ case "pkg-config":
+ di.CgoPkgConfig = append(di.CgoPkgConfig, args...)
+ default:
+ return fmt.Errorf("%s: invalid #cgo verb: %s", filename, orig)
+ }
+ }
+ return nil
+}
+
+// expandSrcDir expands any occurrence of ${SRCDIR}, making sure
+// the result is safe for the shell.
+func expandSrcDir(str string, srcdir string) (string, bool) {
+ // "\" delimited paths cause safeCgoName to fail
+ // so convert native paths with a different delimiter
+ // to "/" before starting (e.g., on Windows).
+ srcdir = filepath.ToSlash(srcdir)
+
+ chunks := strings.Split(str, "${SRCDIR}")
+ if len(chunks) < 2 {
+ return str, safeCgoName(str)
+ }
+ ok := true
+ for _, chunk := range chunks {
+ ok = ok && (chunk == "" || safeCgoName(chunk))
+ }
+ ok = ok && (srcdir == "" || safeCgoName(srcdir))
+ res := strings.Join(chunks, srcdir)
+ return res, ok && res != ""
+}
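+
+// For example (the paths are illustrative only):
+//
+//    s, ok := expandSrcDir("-I${SRCDIR}/include", "/home/user/pkg")
+//    // s == "-I/home/user/pkg/include", ok == true (every chunk is shell-safe)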
+
+// makePathsAbsolute looks for compiler options that take paths and
+// makes them absolute. We do this because through the 1.8 release we
+// ran the compiler in the package directory, so any relative -I or -L
+// options would be relative to that directory. In 1.9 we changed to
+// running the compiler in the build directory, to get consistent
+// build results (issue #19964). To keep builds working, we change any
+// relative -I or -L options to be absolute.
+//
+// Using filepath.IsAbs and filepath.Join here means the results will be
+// different on different systems, but that's OK: -I and -L options are
+// inherently system-dependent.
+func (ctxt *Context) makePathsAbsolute(args []string, srcDir string) {
+ nextPath := false
+ for i, arg := range args {
+ if nextPath {
+ if !filepath.IsAbs(arg) {
+ args[i] = filepath.Join(srcDir, arg)
+ }
+ nextPath = false
+ } else if strings.HasPrefix(arg, "-I") || strings.HasPrefix(arg, "-L") {
+ if len(arg) == 2 {
+ nextPath = true
+ } else {
+ if !filepath.IsAbs(arg[2:]) {
+ args[i] = arg[:2] + filepath.Join(srcDir, arg[2:])
+ }
+ }
+ }
+ }
+}
+
+// NOTE: $ is not safe for the shell, but it is allowed here because of linker options like -Wl,$ORIGIN.
+// We never pass these arguments to a shell (just to programs we construct argv for), so this should be okay.
+// See golang.org/issue/6038.
+// The @ is for OS X. See golang.org/issue/13720.
+// The % is for Jenkins. See golang.org/issue/16959.
+// The ! is because module paths may use it. See golang.org/issue/26716.
+// The ~ and ^ are for sr.ht. See golang.org/issue/32260.
+const safeString = "+-.,/0123456789=ABCDEFGHIJKLMNOPQRSTUVWXYZ_abcdefghijklmnopqrstuvwxyz:$@%! ~^"
+
+func safeCgoName(s string) bool {
+ if s == "" {
+ return false
+ }
+ for i := 0; i < len(s); i++ {
+ if c := s[i]; c < utf8.RuneSelf && strings.IndexByte(safeString, c) < 0 {
+ return false
+ }
+ }
+ return true
+}
+
+// splitQuoted splits the string s around each instance of one or more consecutive
+// white space characters while taking into account quotes and escaping, and
+// returns an array of substrings of s or an empty list if s contains only white space.
+// Single quotes and double quotes are recognized to prevent splitting within the
+// quoted region, and are removed from the resulting substrings. If a quote in s
+// isn't closed err will be set and r will have the unclosed argument as the
+// last element. The backslash is used for escaping.
+//
+// For example, the following string:
+//
+// a b:"c d" 'e''f' "g\""
+//
+// Would be parsed as:
+//
+// []string{"a", "b:c d", "ef", `g"`}
+func splitQuoted(s string) (r []string, err error) {
+ var args []string
+ arg := make([]rune, len(s))
+ escaped := false
+ quoted := false
+ quote := '\x00'
+ i := 0
+ for _, rune := range s {
+ switch {
+ case escaped:
+ escaped = false
+ case rune == '\\':
+ escaped = true
+ continue
+ case quote != '\x00':
+ if rune == quote {
+ quote = '\x00'
+ continue
+ }
+ case rune == '"' || rune == '\'':
+ quoted = true
+ quote = rune
+ continue
+ case unicode.IsSpace(rune):
+ if quoted || i > 0 {
+ quoted = false
+ args = append(args, string(arg[:i]))
+ i = 0
+ }
+ continue
+ }
+ arg[i] = rune
+ i++
+ }
+ if quoted || i > 0 {
+ args = append(args, string(arg[:i]))
+ }
+ if quote != 0 {
+ err = errors.New("unclosed quote")
+ } else if escaped {
+ err = errors.New("unfinished escaping")
+ }
+ return args, err
+}
+
+// matchAuto interprets text as either a +build or //go:build expression (whichever works),
+// reporting whether the expression matches the build context.
+//
+// matchAuto is only used for testing of tag evaluation
+// and in #cgo lines, which accept either syntax.
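+//
+// For example, "linux,386" is interpreted as a +build expression
+// (linux AND 386), while "linux && 386" is interpreted as a //go:build
+// expression, because it contains one of the characters "&|()".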
+func (ctxt *Context) matchAuto(text string, allTags map[string]bool) bool {
+ if strings.ContainsAny(text, "&|()") {
+ text = "//go:build " + text
+ } else {
+ text = "// +build " + text
+ }
+ x, err := constraint.Parse(text)
+ if err != nil {
+ return false
+ }
+ return ctxt.eval(x, allTags)
+}
+
+func (ctxt *Context) eval(x constraint.Expr, allTags map[string]bool) bool {
+ return x.Eval(func(tag string) bool { return ctxt.matchTag(tag, allTags) })
+}
+
+// matchTag reports whether the name is one of:
+//
+// cgo (if cgo is enabled)
+// $GOOS
+// $GOARCH
+// ctxt.Compiler
+// linux (if GOOS = android)
+// solaris (if GOOS = illumos)
+// darwin (if GOOS = ios)
+// unix (if this is a Unix GOOS)
+// boringcrypto (if GOEXPERIMENT=boringcrypto is enabled)
+// tag (if tag is listed in ctxt.BuildTags, ctxt.ToolTags, or ctxt.ReleaseTags)
+//
+// It records all consulted tags in allTags.
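+//
+// For example, with GOOS=android, matchTag("linux", allTags) reports true
+// and records "linux" in allTags.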
+func (ctxt *Context) matchTag(name string, allTags map[string]bool) bool {
+ if allTags != nil {
+ allTags[name] = true
+ }
+
+ // special tags
+ if ctxt.CgoEnabled && name == "cgo" {
+ return true
+ }
+ if name == ctxt.GOOS || name == ctxt.GOARCH || name == ctxt.Compiler {
+ return true
+ }
+ if ctxt.GOOS == "android" && name == "linux" {
+ return true
+ }
+ if ctxt.GOOS == "illumos" && name == "solaris" {
+ return true
+ }
+ if ctxt.GOOS == "ios" && name == "darwin" {
+ return true
+ }
+ if name == "unix" && unixOS[ctxt.GOOS] {
+ return true
+ }
+ if name == "boringcrypto" {
+ name = "goexperiment.boringcrypto" // boringcrypto is an old name for goexperiment.boringcrypto
+ }
+
+ // other tags
+ for _, tag := range ctxt.BuildTags {
+ if tag == name {
+ return true
+ }
+ }
+ for _, tag := range ctxt.ToolTags {
+ if tag == name {
+ return true
+ }
+ }
+ for _, tag := range ctxt.ReleaseTags {
+ if tag == name {
+ return true
+ }
+ }
+
+ return false
+}
+
+// goodOSArchFile returns false if the name contains a $GOOS or $GOARCH
+// suffix which does not match the current system.
+// The recognized name formats are:
+//
+// name_$(GOOS).*
+// name_$(GOARCH).*
+// name_$(GOOS)_$(GOARCH).*
+// name_$(GOOS)_test.*
+// name_$(GOARCH)_test.*
+// name_$(GOOS)_$(GOARCH)_test.*
+//
+// Exceptions:
+// if GOOS=android, then files with GOOS=linux are also matched.
+// if GOOS=illumos, then files with GOOS=solaris are also matched.
+// if GOOS=ios, then files with GOOS=darwin are also matched.
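+//
+// For example (illustrative), with GOOS=linux and GOARCH=amd64,
+// "foo_linux.go" and "foo_linux_amd64.go" are good, while "foo_windows.go"
+// and "foo_arm.go" are not. Files with no GOOS or GOARCH suffix, such as
+// "foo.go" or "linux.go", are always good.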
+func (ctxt *Context) goodOSArchFile(name string, allTags map[string]bool) bool {
+ name, _, _ = strings.Cut(name, ".")
+
+ // Before Go 1.4, a file called "linux.go" would be equivalent to having a
+ // build tag "linux" in that file. For Go 1.4 and beyond, we require this
+ // auto-tagging to apply only to files with a non-empty prefix, so
+ // "foo_linux.go" is tagged but "linux.go" is not. This allows new operating
+ // systems, such as android, to arrive without breaking existing code with
+ // innocuous source code in "android.go". The easiest fix: cut everything
+ // in the name before the initial _.
+ i := strings.Index(name, "_")
+ if i < 0 {
+ return true
+ }
+ name = name[i:] // ignore everything before first _
+
+ l := strings.Split(name, "_")
+ if n := len(l); n > 0 && l[n-1] == "test" {
+ l = l[:n-1]
+ }
+ n := len(l)
+ if n >= 2 && knownOS[l[n-2]] && knownArch[l[n-1]] {
+ if allTags != nil {
+ // In case we short-circuit on l[n-1].
+ allTags[l[n-2]] = true
+ }
+ return ctxt.matchTag(l[n-1], allTags) && ctxt.matchTag(l[n-2], allTags)
+ }
+ if n >= 1 && (knownOS[l[n-1]] || knownArch[l[n-1]]) {
+ return ctxt.matchTag(l[n-1], allTags)
+ }
+ return true
+}
+
+// ToolDir is the directory containing build tools.
+var ToolDir = getToolDir()
+
+// IsLocalImport reports whether the import path is
+// a local import path, like ".", "..", "./foo", or "../foo".
+func IsLocalImport(path string) bool {
+ return path == "." || path == ".." ||
+ strings.HasPrefix(path, "./") || strings.HasPrefix(path, "../")
+}
+
+// ArchChar returns "?" and an error.
+// In earlier versions of Go, the returned string was used to derive
+// the compiler and linker tool names, the default object file suffix,
+// and the default linker output name. As of Go 1.5, those strings
+// no longer vary by architecture; they are compile, link, .o, and a.out, respectively.
+func ArchChar(goarch string) (string, error) {
+ return "?", errors.New("architecture letter no longer used")
+}
diff --git a/src/go/build/build_test.go b/src/go/build/build_test.go
new file mode 100644
index 0000000..2e60ecc
--- /dev/null
+++ b/src/go/build/build_test.go
@@ -0,0 +1,803 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package build
+
+import (
+ "internal/testenv"
+ "io"
+ "os"
+ "path/filepath"
+ "reflect"
+ "runtime"
+ "strings"
+ "testing"
+)
+
+func TestMain(m *testing.M) {
+ Default.GOROOT = testenv.GOROOT(nil)
+ os.Exit(m.Run())
+}
+
+func TestMatch(t *testing.T) {
+ ctxt := Default
+ what := "default"
+ match := func(tag string, want map[string]bool) {
+ t.Helper()
+ m := make(map[string]bool)
+ if !ctxt.matchAuto(tag, m) {
+ t.Errorf("%s context should match %s, does not", what, tag)
+ }
+ if !reflect.DeepEqual(m, want) {
+ t.Errorf("%s tags = %v, want %v", tag, m, want)
+ }
+ }
+ nomatch := func(tag string, want map[string]bool) {
+ t.Helper()
+ m := make(map[string]bool)
+ if ctxt.matchAuto(tag, m) {
+ t.Errorf("%s context should NOT match %s, does", what, tag)
+ }
+ if !reflect.DeepEqual(m, want) {
+ t.Errorf("%s tags = %v, want %v", tag, m, want)
+ }
+ }
+
+ match(runtime.GOOS+","+runtime.GOARCH, map[string]bool{runtime.GOOS: true, runtime.GOARCH: true})
+ match(runtime.GOOS+","+runtime.GOARCH+",!foo", map[string]bool{runtime.GOOS: true, runtime.GOARCH: true, "foo": true})
+ nomatch(runtime.GOOS+","+runtime.GOARCH+",foo", map[string]bool{runtime.GOOS: true, runtime.GOARCH: true, "foo": true})
+
+ what = "modified"
+ ctxt.BuildTags = []string{"foo"}
+ match(runtime.GOOS+","+runtime.GOARCH, map[string]bool{runtime.GOOS: true, runtime.GOARCH: true})
+ match(runtime.GOOS+","+runtime.GOARCH+",foo", map[string]bool{runtime.GOOS: true, runtime.GOARCH: true, "foo": true})
+ nomatch(runtime.GOOS+","+runtime.GOARCH+",!foo", map[string]bool{runtime.GOOS: true, runtime.GOARCH: true, "foo": true})
+ match(runtime.GOOS+","+runtime.GOARCH+",!bar", map[string]bool{runtime.GOOS: true, runtime.GOARCH: true, "bar": true})
+ nomatch(runtime.GOOS+","+runtime.GOARCH+",bar", map[string]bool{runtime.GOOS: true, runtime.GOARCH: true, "bar": true})
+}
+
+func TestDotSlashImport(t *testing.T) {
+ p, err := ImportDir("testdata/other", 0)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if len(p.Imports) != 1 || p.Imports[0] != "./file" {
+ t.Fatalf("testdata/other: Imports=%v, want [./file]", p.Imports)
+ }
+
+ p1, err := Import("./file", "testdata/other", 0)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if p1.Name != "file" {
+ t.Fatalf("./file: Name=%q, want %q", p1.Name, "file")
+ }
+ dir := filepath.Clean("testdata/other/file") // Clean to use \ on Windows
+ if p1.Dir != dir {
+ t.Fatalf("./file: Dir=%q, want %q", p1.Name, dir)
+ }
+}
+
+func TestEmptyImport(t *testing.T) {
+ p, err := Import("", testenv.GOROOT(t), FindOnly)
+ if err == nil {
+ t.Fatal(`Import("") returned nil error.`)
+ }
+ if p == nil {
+ t.Fatal(`Import("") returned nil package.`)
+ }
+ if p.ImportPath != "" {
+ t.Fatalf("ImportPath=%q, want %q.", p.ImportPath, "")
+ }
+}
+
+func TestEmptyFolderImport(t *testing.T) {
+ _, err := Import(".", "testdata/empty", 0)
+ if _, ok := err.(*NoGoError); !ok {
+ t.Fatal(`Import("testdata/empty") did not return NoGoError.`)
+ }
+}
+
+func TestMultiplePackageImport(t *testing.T) {
+ pkg, err := Import(".", "testdata/multi", 0)
+
+ mpe, ok := err.(*MultiplePackageError)
+ if !ok {
+ t.Fatal(`Import("testdata/multi") did not return MultiplePackageError.`)
+ }
+ want := &MultiplePackageError{
+ Dir: filepath.FromSlash("testdata/multi"),
+ Packages: []string{"main", "test_package"},
+ Files: []string{"file.go", "file_appengine.go"},
+ }
+ if !reflect.DeepEqual(mpe, want) {
+ t.Errorf("err = %#v; want %#v", mpe, want)
+ }
+
+ // TODO(#45999): Since the name is ambiguous, pkg.Name should be left empty.
+ if wantName := "main"; pkg.Name != wantName {
+ t.Errorf("pkg.Name = %q; want %q", pkg.Name, wantName)
+ }
+
+ if wantGoFiles := []string{"file.go", "file_appengine.go"}; !reflect.DeepEqual(pkg.GoFiles, wantGoFiles) {
+ t.Errorf("pkg.GoFiles = %q; want %q", pkg.GoFiles, wantGoFiles)
+ }
+
+ if wantInvalidFiles := []string{"file_appengine.go"}; !reflect.DeepEqual(pkg.InvalidGoFiles, wantInvalidFiles) {
+ t.Errorf("pkg.InvalidGoFiles = %q; want %q", pkg.InvalidGoFiles, wantInvalidFiles)
+ }
+}
+
+func TestLocalDirectory(t *testing.T) {
+ if runtime.GOOS == "ios" {
+ t.Skipf("skipping on %s/%s, no valid GOROOT", runtime.GOOS, runtime.GOARCH)
+ }
+
+ cwd, err := os.Getwd()
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ p, err := ImportDir(cwd, 0)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if p.ImportPath != "go/build" {
+ t.Fatalf("ImportPath=%q, want %q", p.ImportPath, "go/build")
+ }
+}
+
+var shouldBuildTests = []struct {
+ name string
+ content string
+ tags map[string]bool
+ binaryOnly bool
+ shouldBuild bool
+ err error
+}{
+ {
+ name: "Yes",
+ content: "// +build yes\n\n" +
+ "package main\n",
+ tags: map[string]bool{"yes": true},
+ shouldBuild: true,
+ },
+ {
+ name: "Yes2",
+ content: "//go:build yes\n" +
+ "package main\n",
+ tags: map[string]bool{"yes": true},
+ shouldBuild: true,
+ },
+ {
+ name: "Or",
+ content: "// +build no yes\n\n" +
+ "package main\n",
+ tags: map[string]bool{"yes": true, "no": true},
+ shouldBuild: true,
+ },
+ {
+ name: "Or2",
+ content: "//go:build no || yes\n" +
+ "package main\n",
+ tags: map[string]bool{"yes": true, "no": true},
+ shouldBuild: true,
+ },
+ {
+ name: "And",
+ content: "// +build no,yes\n\n" +
+ "package main\n",
+ tags: map[string]bool{"yes": true, "no": true},
+ shouldBuild: false,
+ },
+ {
+ name: "And2",
+ content: "//go:build no && yes\n" +
+ "package main\n",
+ tags: map[string]bool{"yes": true, "no": true},
+ shouldBuild: false,
+ },
+ {
+ name: "Cgo",
+ content: "// +build cgo\n\n" +
+ "// Copyright The Go Authors.\n\n" +
+ "// This package implements parsing of tags like\n" +
+ "// +build tag1\n" +
+ "package build",
+ tags: map[string]bool{"cgo": true},
+ shouldBuild: false,
+ },
+ {
+ name: "Cgo2",
+ content: "//go:build cgo\n" +
+ "// Copyright The Go Authors.\n\n" +
+ "// This package implements parsing of tags like\n" +
+ "// +build tag1\n" +
+ "package build",
+ tags: map[string]bool{"cgo": true},
+ shouldBuild: false,
+ },
+ {
+ name: "AfterPackage",
+ content: "// Copyright The Go Authors.\n\n" +
+ "package build\n\n" +
+ "// shouldBuild checks tags given by lines of the form\n" +
+ "// +build tag\n" +
+ "//go:build tag\n" +
+ "func shouldBuild(content []byte)\n",
+ tags: map[string]bool{},
+ shouldBuild: true,
+ },
+ {
+ name: "TooClose",
+ content: "// +build yes\n" +
+ "package main\n",
+ tags: map[string]bool{},
+ shouldBuild: true,
+ },
+ {
+ name: "TooClose2",
+ content: "//go:build yes\n" +
+ "package main\n",
+ tags: map[string]bool{"yes": true},
+ shouldBuild: true,
+ },
+ {
+ name: "TooCloseNo",
+ content: "// +build no\n" +
+ "package main\n",
+ tags: map[string]bool{},
+ shouldBuild: true,
+ },
+ {
+ name: "TooCloseNo2",
+ content: "//go:build no\n" +
+ "package main\n",
+ tags: map[string]bool{"no": true},
+ shouldBuild: false,
+ },
+ {
+ name: "BinaryOnly",
+ content: "//go:binary-only-package\n" +
+ "// +build yes\n" +
+ "package main\n",
+ tags: map[string]bool{},
+ binaryOnly: true,
+ shouldBuild: true,
+ },
+ {
+ name: "BinaryOnly2",
+ content: "//go:binary-only-package\n" +
+ "//go:build no\n" +
+ "package main\n",
+ tags: map[string]bool{"no": true},
+ binaryOnly: true,
+ shouldBuild: false,
+ },
+ {
+ name: "ValidGoBuild",
+ content: "// +build yes\n\n" +
+ "//go:build no\n" +
+ "package main\n",
+ tags: map[string]bool{"no": true},
+ shouldBuild: false,
+ },
+ {
+ name: "MissingBuild2",
+ content: "/* */\n" +
+ "// +build yes\n\n" +
+ "//go:build no\n" +
+ "package main\n",
+ tags: map[string]bool{"no": true},
+ shouldBuild: false,
+ },
+ {
+ name: "Comment1",
+ content: "/*\n" +
+ "//go:build no\n" +
+ "*/\n\n" +
+ "package main\n",
+ tags: map[string]bool{},
+ shouldBuild: true,
+ },
+ {
+ name: "Comment2",
+ content: "/*\n" +
+ "text\n" +
+ "*/\n\n" +
+ "//go:build no\n" +
+ "package main\n",
+ tags: map[string]bool{"no": true},
+ shouldBuild: false,
+ },
+ {
+ name: "Comment3",
+ content: "/*/*/ /* hi *//* \n" +
+ "text\n" +
+ "*/\n\n" +
+ "//go:build no\n" +
+ "package main\n",
+ tags: map[string]bool{"no": true},
+ shouldBuild: false,
+ },
+ {
+ name: "Comment4",
+ content: "/**///go:build no\n" +
+ "package main\n",
+ tags: map[string]bool{},
+ shouldBuild: true,
+ },
+ {
+ name: "Comment5",
+ content: "/**/\n" +
+ "//go:build no\n" +
+ "package main\n",
+ tags: map[string]bool{"no": true},
+ shouldBuild: false,
+ },
+}
+
+func TestShouldBuild(t *testing.T) {
+ for _, tt := range shouldBuildTests {
+ t.Run(tt.name, func(t *testing.T) {
+ ctx := &Context{BuildTags: []string{"yes"}}
+ tags := map[string]bool{}
+ shouldBuild, binaryOnly, err := ctx.shouldBuild([]byte(tt.content), tags)
+ if shouldBuild != tt.shouldBuild || binaryOnly != tt.binaryOnly || !reflect.DeepEqual(tags, tt.tags) || err != tt.err {
+ t.Errorf("mismatch:\n"+
+ "have shouldBuild=%v, binaryOnly=%v, tags=%v, err=%v\n"+
+ "want shouldBuild=%v, binaryOnly=%v, tags=%v, err=%v",
+ shouldBuild, binaryOnly, tags, err,
+ tt.shouldBuild, tt.binaryOnly, tt.tags, tt.err)
+ }
+ })
+ }
+}
+
+func TestGoodOSArchFile(t *testing.T) {
+ ctx := &Context{BuildTags: []string{"linux"}, GOOS: "darwin"}
+ m := map[string]bool{}
+ want := map[string]bool{"linux": true}
+ if !ctx.goodOSArchFile("hello_linux.go", m) {
+ t.Errorf("goodOSArchFile(hello_linux.go) = false, want true")
+ }
+ if !reflect.DeepEqual(m, want) {
+ t.Errorf("goodOSArchFile(hello_linux.go) tags = %v, want %v", m, want)
+ }
+}
+
+type readNopCloser struct {
+ io.Reader
+}
+
+func (r readNopCloser) Close() error {
+ return nil
+}
+
+var (
+ ctxtP9 = Context{GOARCH: "arm", GOOS: "plan9"}
+ ctxtAndroid = Context{GOARCH: "arm", GOOS: "android"}
+)
+
+var matchFileTests = []struct {
+ ctxt Context
+ name string
+ data string
+ match bool
+}{
+ {ctxtP9, "foo_arm.go", "", true},
+ {ctxtP9, "foo1_arm.go", "// +build linux\n\npackage main\n", false},
+ {ctxtP9, "foo_darwin.go", "", false},
+ {ctxtP9, "foo.go", "", true},
+ {ctxtP9, "foo1.go", "// +build linux\n\npackage main\n", false},
+ {ctxtP9, "foo.badsuffix", "", false},
+ {ctxtAndroid, "foo_linux.go", "", true},
+ {ctxtAndroid, "foo_android.go", "", true},
+ {ctxtAndroid, "foo_plan9.go", "", false},
+ {ctxtAndroid, "android.go", "", true},
+ {ctxtAndroid, "plan9.go", "", true},
+ {ctxtAndroid, "plan9_test.go", "", true},
+ {ctxtAndroid, "arm.s", "", true},
+ {ctxtAndroid, "amd64.s", "", true},
+}
+
+func TestMatchFile(t *testing.T) {
+ for _, tt := range matchFileTests {
+ ctxt := tt.ctxt
+ ctxt.OpenFile = func(path string) (r io.ReadCloser, err error) {
+ if path != "x+"+tt.name {
+ t.Fatalf("OpenFile asked for %q, expected %q", path, "x+"+tt.name)
+ }
+ return &readNopCloser{strings.NewReader(tt.data)}, nil
+ }
+ ctxt.JoinPath = func(elem ...string) string {
+ return strings.Join(elem, "+")
+ }
+ match, err := ctxt.MatchFile("x", tt.name)
+ if match != tt.match || err != nil {
+ t.Fatalf("MatchFile(%q) = %v, %v, want %v, nil", tt.name, match, err, tt.match)
+ }
+ }
+}
+
+func TestImportCmd(t *testing.T) {
+ if runtime.GOOS == "ios" {
+ t.Skipf("skipping on %s/%s, no valid GOROOT", runtime.GOOS, runtime.GOARCH)
+ }
+
+ p, err := Import("cmd/internal/objfile", "", 0)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if !strings.HasSuffix(filepath.ToSlash(p.Dir), "src/cmd/internal/objfile") {
+ t.Fatalf("Import cmd/internal/objfile returned Dir=%q, want %q", filepath.ToSlash(p.Dir), ".../src/cmd/internal/objfile")
+ }
+}
+
+var (
+ expandSrcDirPath = filepath.Join(string(filepath.Separator)+"projects", "src", "add")
+)
+
+var expandSrcDirTests = []struct {
+ input, expected string
+}{
+ {"-L ${SRCDIR}/libs -ladd", "-L /projects/src/add/libs -ladd"},
+ {"${SRCDIR}/add_linux_386.a -pthread -lstdc++", "/projects/src/add/add_linux_386.a -pthread -lstdc++"},
+ {"Nothing to expand here!", "Nothing to expand here!"},
+ {"$", "$"},
+ {"$$", "$$"},
+ {"${", "${"},
+ {"$}", "$}"},
+ {"$FOO ${BAR}", "$FOO ${BAR}"},
+ {"Find me the $SRCDIRECTORY.", "Find me the $SRCDIRECTORY."},
+ {"$SRCDIR is missing braces", "$SRCDIR is missing braces"},
+}
+
+func TestExpandSrcDir(t *testing.T) {
+ for _, test := range expandSrcDirTests {
+ output, _ := expandSrcDir(test.input, expandSrcDirPath)
+ if output != test.expected {
+ t.Errorf("%q expands to %q with SRCDIR=%q when %q is expected", test.input, output, expandSrcDirPath, test.expected)
+ } else {
+ t.Logf("%q expands to %q with SRCDIR=%q", test.input, output, expandSrcDirPath)
+ }
+ }
+}
+
+func TestShellSafety(t *testing.T) {
+ tests := []struct {
+ input, srcdir, expected string
+ result bool
+ }{
+ {"-I${SRCDIR}/../include", "/projects/src/issue 11868", "-I/projects/src/issue 11868/../include", true},
+ {"-I${SRCDIR}", "~wtf$@%^", "-I~wtf$@%^", true},
+ {"-X${SRCDIR}/1,${SRCDIR}/2", "/projects/src/issue 11868", "-X/projects/src/issue 11868/1,/projects/src/issue 11868/2", true},
+ {"-I/tmp -I/tmp", "/tmp2", "-I/tmp -I/tmp", true},
+ {"-I/tmp", "/tmp/[0]", "-I/tmp", true},
+ {"-I${SRCDIR}/dir", "/tmp/[0]", "-I/tmp/[0]/dir", false},
+ {"-I${SRCDIR}/dir", "/tmp/go go", "-I/tmp/go go/dir", true},
+ {"-I${SRCDIR}/dir dir", "/tmp/go", "-I/tmp/go/dir dir", true},
+ }
+ for _, test := range tests {
+ output, ok := expandSrcDir(test.input, test.srcdir)
+ if ok != test.result {
+ t.Errorf("Expected %t while %q expands to %q with SRCDIR=%q; got %t", test.result, test.input, output, test.srcdir, ok)
+ }
+ if output != test.expected {
+ t.Errorf("Expected %q while %q expands with SRCDIR=%q; got %q", test.expected, test.input, test.srcdir, output)
+ }
+ }
+}
+
+// Want to get a "cannot find package" error when the directory for the package does not exist.
+// There should be valid partial information in the returned non-nil *Package.
+func TestImportDirNotExist(t *testing.T) {
+ testenv.MustHaveGoBuild(t) // really must just have source
+ ctxt := Default
+
+ emptyDir := t.TempDir()
+
+ ctxt.GOPATH = emptyDir
+ ctxt.Dir = emptyDir
+
+ tests := []struct {
+ label string
+ path, srcDir string
+ mode ImportMode
+ }{
+ {"Import(full, 0)", "go/build/doesnotexist", "", 0},
+ {"Import(local, 0)", "./doesnotexist", filepath.Join(ctxt.GOROOT, "src/go/build"), 0},
+ {"Import(full, FindOnly)", "go/build/doesnotexist", "", FindOnly},
+ {"Import(local, FindOnly)", "./doesnotexist", filepath.Join(ctxt.GOROOT, "src/go/build"), FindOnly},
+ }
+
+ defer os.Setenv("GO111MODULE", os.Getenv("GO111MODULE"))
+
+ for _, GO111MODULE := range []string{"off", "on"} {
+ t.Run("GO111MODULE="+GO111MODULE, func(t *testing.T) {
+ os.Setenv("GO111MODULE", GO111MODULE)
+
+ for _, test := range tests {
+ p, err := ctxt.Import(test.path, test.srcDir, test.mode)
+
+ errOk := (err != nil && strings.HasPrefix(err.Error(), "cannot find package"))
+ wantErr := `"cannot find package" error`
+ if test.srcDir == "" {
+ if err != nil && strings.Contains(err.Error(), "is not in GOROOT") {
+ errOk = true
+ }
+ wantErr = `"cannot find package" or "is not in GOROOT" error`
+ }
+ if !errOk {
+ t.Errorf("%s got error: %q, want %s", test.label, err, wantErr)
+ }
+ // If an error occurs, build.Import is documented to return
+ // a non-nil *Package containing partial information.
+ if p == nil {
+ t.Fatalf(`%s got nil p, want non-nil *Package`, test.label)
+ }
+ // Verify partial information in p.
+ if p.ImportPath != "go/build/doesnotexist" {
+ t.Errorf(`%s got p.ImportPath: %q, want "go/build/doesnotexist"`, test.label, p.ImportPath)
+ }
+ }
+ })
+ }
+}
+
+func TestImportVendor(t *testing.T) {
+ testenv.MustHaveGoBuild(t) // really must just have source
+
+ t.Setenv("GO111MODULE", "off")
+
+ ctxt := Default
+ wd, err := os.Getwd()
+ if err != nil {
+ t.Fatal(err)
+ }
+ ctxt.GOPATH = filepath.Join(wd, "testdata/withvendor")
+ p, err := ctxt.Import("c/d", filepath.Join(ctxt.GOPATH, "src/a/b"), 0)
+ if err != nil {
+ t.Fatalf("cannot find vendored c/d from testdata src/a/b directory: %v", err)
+ }
+ want := "a/vendor/c/d"
+ if p.ImportPath != want {
+ t.Fatalf("Import succeeded but found %q, want %q", p.ImportPath, want)
+ }
+}
+
+func BenchmarkImportVendor(b *testing.B) {
+ testenv.MustHaveGoBuild(b) // really must just have source
+
+ b.Setenv("GO111MODULE", "off")
+
+ ctxt := Default
+ wd, err := os.Getwd()
+ if err != nil {
+ b.Fatal(err)
+ }
+ ctxt.GOPATH = filepath.Join(wd, "testdata/withvendor")
+ dir := filepath.Join(ctxt.GOPATH, "src/a/b")
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ _, err := ctxt.Import("c/d", dir, 0)
+ if err != nil {
+ b.Fatalf("cannot find vendored c/d from testdata src/a/b directory: %v", err)
+ }
+ }
+}
+
+func TestImportVendorFailure(t *testing.T) {
+ testenv.MustHaveGoBuild(t) // really must just have source
+
+ t.Setenv("GO111MODULE", "off")
+
+ ctxt := Default
+ wd, err := os.Getwd()
+ if err != nil {
+ t.Fatal(err)
+ }
+ ctxt.GOPATH = filepath.Join(wd, "testdata/withvendor")
+ p, err := ctxt.Import("x.com/y/z", filepath.Join(ctxt.GOPATH, "src/a/b"), 0)
+ if err == nil {
+ t.Fatalf("found made-up package x.com/y/z in %s", p.Dir)
+ }
+
+ e := err.Error()
+ if !strings.Contains(e, " (vendor tree)") {
+ t.Fatalf("error on failed import does not mention GOROOT/src/vendor directory:\n%s", e)
+ }
+}
+
+func TestImportVendorParentFailure(t *testing.T) {
+ testenv.MustHaveGoBuild(t) // really must just have source
+
+ t.Setenv("GO111MODULE", "off")
+
+ ctxt := Default
+ wd, err := os.Getwd()
+ if err != nil {
+ t.Fatal(err)
+ }
+ ctxt.GOPATH = filepath.Join(wd, "testdata/withvendor")
+ // This import should fail because the vendor/c directory has no source code.
+ p, err := ctxt.Import("c", filepath.Join(ctxt.GOPATH, "src/a/b"), 0)
+ if err == nil {
+ t.Fatalf("found empty parent in %s", p.Dir)
+ }
+ if p != nil && p.Dir != "" {
+ t.Fatalf("decided to use %s", p.Dir)
+ }
+ e := err.Error()
+ if !strings.Contains(e, " (vendor tree)") {
+ t.Fatalf("error on failed import does not mention GOROOT/src/vendor directory:\n%s", e)
+ }
+}
+
+// Check that a package is loaded in module mode if GO111MODULE=on, even when
+// no go.mod file is present. It should fail to resolve packages outside std.
+// Verifies golang.org/issue/34669.
+func TestImportPackageOutsideModule(t *testing.T) {
+ testenv.MustHaveGoBuild(t)
+
+ // Disable module fetching for this test so that 'go list' fails quickly
+ // without trying to find the latest version of a module.
+ t.Setenv("GOPROXY", "off")
+
+ // Create a GOPATH in a temporary directory. We don't use testdata
+ // because it's in GOROOT, which interferes with the module heuristic.
+ gopath := t.TempDir()
+ if err := os.MkdirAll(filepath.Join(gopath, "src/example.com/p"), 0777); err != nil {
+ t.Fatal(err)
+ }
+ if err := os.WriteFile(filepath.Join(gopath, "src/example.com/p/p.go"), []byte("package p"), 0666); err != nil {
+ t.Fatal(err)
+ }
+
+ t.Setenv("GO111MODULE", "on")
+ t.Setenv("GOPATH", gopath)
+ ctxt := Default
+ ctxt.GOPATH = gopath
+ ctxt.Dir = filepath.Join(gopath, "src/example.com/p")
+
+ want := "go.mod file not found in current directory or any parent directory"
+ if _, err := ctxt.Import("example.com/p", gopath, FindOnly); err == nil {
+ t.Fatal("importing package when no go.mod is present succeeded unexpectedly")
+ } else if errStr := err.Error(); !strings.Contains(errStr, want) {
+ t.Fatalf("error when importing package when no go.mod is present: got %q; want %q", errStr, want)
+ } else {
+ t.Logf(`ctxt.Import("example.com/p", _, FindOnly): %v`, err)
+ }
+}
+
+// TestIssue23594 prevents go/build from regressing and populating Package.Doc
+// from comments in test files.
+func TestIssue23594(t *testing.T) {
+ // Package testdata/doc contains regular and external test files
+ // with comments attached to their package declarations. The names of the files
+ // ensure that we see the comments from the test files first.
+ p, err := ImportDir("testdata/doc", 0)
+ if err != nil {
+ t.Fatalf("could not import testdata: %v", err)
+ }
+
+ if p.Doc != "Correct" {
+ t.Fatalf("incorrectly set .Doc to %q", p.Doc)
+ }
+}
+
+// TestIssue56509 tests that go/build does not add non-go files to InvalidGoFiles
+// when they have unparsable comments.
+func TestIssue56509(t *testing.T) {
+ // The directory testdata/bads contains a .s file that has an unparsable
+ // comment. (go/build parses initial comments in non-go files looking for
+ // //go:build or // +build comments).
+ p, err := ImportDir("testdata/bads", 0)
+ if err == nil {
+ t.Fatalf("could not import testdata/bads: %v", err)
+ }
+
+ if len(p.InvalidGoFiles) != 0 {
+ t.Fatalf("incorrectly added non-go file to InvalidGoFiles")
+ }
+}
+
+// TestMissingImportErrorRepetition checks that when an unknown package is
+// imported, the package path is only shown once in the error.
+// Verifies golang.org/issue/34752.
+func TestMissingImportErrorRepetition(t *testing.T) {
+ testenv.MustHaveGoBuild(t) // need 'go list' internally
+ tmp := t.TempDir()
+ if err := os.WriteFile(filepath.Join(tmp, "go.mod"), []byte("module m"), 0666); err != nil {
+ t.Fatal(err)
+ }
+ t.Setenv("GO111MODULE", "on")
+ t.Setenv("GOPROXY", "off")
+ t.Setenv("GONOPROXY", "none")
+
+ ctxt := Default
+ ctxt.Dir = tmp
+
+ pkgPath := "example.com/hello"
+ _, err := ctxt.Import(pkgPath, tmp, FindOnly)
+ if err == nil {
+ t.Fatal("unexpected success")
+ }
+
+ // Don't count the package path with a URL like https://...?go-get=1.
+ // See golang.org/issue/35986.
+ errStr := strings.ReplaceAll(err.Error(), "://"+pkgPath+"?go-get=1", "://...?go-get=1")
+
+ // Also don't count instances in suggested "go get" or similar commands
+ // (see https://golang.org/issue/41576). The suggested command typically
+ // follows a semicolon.
+ errStr, _, _ = strings.Cut(errStr, ";")
+
+ if n := strings.Count(errStr, pkgPath); n != 1 {
+ t.Fatalf("package path %q appears in error %d times; should appear once\nerror: %v", pkgPath, n, err)
+ }
+}
+
+// TestCgoImportsIgnored checks that imports in cgo files are not included
+// in the imports list when cgo is disabled.
+// Verifies golang.org/issue/35946.
+func TestCgoImportsIgnored(t *testing.T) {
+ ctxt := Default
+ ctxt.CgoEnabled = false
+ p, err := ctxt.ImportDir("testdata/cgo_disabled", 0)
+ if err != nil {
+ t.Fatal(err)
+ }
+ for _, path := range p.Imports {
+ if path == "should/be/ignored" {
+ t.Errorf("found import %q in ignored cgo file", path)
+ }
+ }
+}
+
+// Issue #52053. Check that if there is a file x_GOOS_GOARCH.go, then both
+// GOOS and GOARCH show up in the Package.AllTags field. We test both the
+// case where the file matches and where the file does not match.
+// The latter case used to fail, incorrectly omitting GOOS.
+func TestAllTags(t *testing.T) {
+ ctxt := Default
+ ctxt.GOARCH = "arm"
+ ctxt.GOOS = "netbsd"
+ p, err := ctxt.ImportDir("testdata/alltags", 0)
+ if err != nil {
+ t.Fatal(err)
+ }
+ want := []string{"arm", "netbsd"}
+ if !reflect.DeepEqual(p.AllTags, want) {
+ t.Errorf("AllTags = %v, want %v", p.AllTags, want)
+ }
+ wantFiles := []string{"alltags.go", "x_netbsd_arm.go"}
+ if !reflect.DeepEqual(p.GoFiles, wantFiles) {
+ t.Errorf("GoFiles = %v, want %v", p.GoFiles, wantFiles)
+ }
+
+ ctxt.GOARCH = "amd64"
+ ctxt.GOOS = "linux"
+ p, err = ctxt.ImportDir("testdata/alltags", 0)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if !reflect.DeepEqual(p.AllTags, want) {
+ t.Errorf("AllTags = %v, want %v", p.AllTags, want)
+ }
+ wantFiles = []string{"alltags.go"}
+ if !reflect.DeepEqual(p.GoFiles, wantFiles) {
+ t.Errorf("GoFiles = %v, want %v", p.GoFiles, wantFiles)
+ }
+}
+
+func TestAllTagsNonSourceFile(t *testing.T) {
+ p, err := Default.ImportDir("testdata/non_source_tags", 0)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if len(p.AllTags) > 0 {
+ t.Errorf("AllTags = %v, want empty", p.AllTags)
+ }
+}
diff --git a/src/go/build/constraint/expr.go b/src/go/build/constraint/expr.go
new file mode 100644
index 0000000..505cbff
--- /dev/null
+++ b/src/go/build/constraint/expr.go
@@ -0,0 +1,574 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package constraint implements parsing and evaluation of build constraint lines.
+// See https://golang.org/cmd/go/#hdr-Build_constraints for documentation about build constraints themselves.
+//
+// This package parses both the original “// +build” syntax and the “//go:build” syntax that was added in Go 1.17.
+// See https://golang.org/design/draft-gobuild for details about the “//go:build” syntax.
+package constraint
+
+import (
+ "errors"
+ "strings"
+ "unicode"
+ "unicode/utf8"
+)
+
+// An Expr is a build tag constraint expression.
+// The underlying concrete type is *AndExpr, *OrExpr, *NotExpr, or *TagExpr.
+type Expr interface {
+ // String returns the string form of the expression,
+ // using the boolean syntax used in //go:build lines.
+ String() string
+
+ // Eval reports whether the expression evaluates to true.
+ // It calls ok(tag) as needed to find out whether a given build tag
+ // is satisfied by the current build configuration.
+ Eval(ok func(tag string) bool) bool
+
+ // The presence of an isExpr method explicitly marks the type as an Expr.
+ // Only implementations in this package should be used as Exprs.
+ isExpr()
+}
+
+// A TagExpr is an Expr for the single tag Tag.
+type TagExpr struct {
+ Tag string // for example, “linux” or “cgo”
+}
+
+func (x *TagExpr) isExpr() {}
+
+func (x *TagExpr) Eval(ok func(tag string) bool) bool {
+ return ok(x.Tag)
+}
+
+func (x *TagExpr) String() string {
+ return x.Tag
+}
+
+func tag(tag string) Expr { return &TagExpr{tag} }
+
+// A NotExpr represents the expression !X (the negation of X).
+type NotExpr struct {
+ X Expr
+}
+
+func (x *NotExpr) isExpr() {}
+
+func (x *NotExpr) Eval(ok func(tag string) bool) bool {
+ return !x.X.Eval(ok)
+}
+
+func (x *NotExpr) String() string {
+ s := x.X.String()
+ switch x.X.(type) {
+ case *AndExpr, *OrExpr:
+ s = "(" + s + ")"
+ }
+ return "!" + s
+}
+
+func not(x Expr) Expr { return &NotExpr{x} }
+
+// An AndExpr represents the expression X && Y.
+type AndExpr struct {
+ X, Y Expr
+}
+
+func (x *AndExpr) isExpr() {}
+
+func (x *AndExpr) Eval(ok func(tag string) bool) bool {
+ // Note: Eval both, to make sure ok func observes all tags.
+ xok := x.X.Eval(ok)
+ yok := x.Y.Eval(ok)
+ return xok && yok
+}
+
+func (x *AndExpr) String() string {
+ return andArg(x.X) + " && " + andArg(x.Y)
+}
+
+func andArg(x Expr) string {
+ s := x.String()
+ if _, ok := x.(*OrExpr); ok {
+ s = "(" + s + ")"
+ }
+ return s
+}
+
+func and(x, y Expr) Expr {
+ return &AndExpr{x, y}
+}
+
+// An OrExpr represents the expression X || Y.
+type OrExpr struct {
+ X, Y Expr
+}
+
+func (x *OrExpr) isExpr() {}
+
+func (x *OrExpr) Eval(ok func(tag string) bool) bool {
+ // Note: Eval both, to make sure ok func observes all tags.
+ xok := x.X.Eval(ok)
+ yok := x.Y.Eval(ok)
+ return xok || yok
+}
+
+func (x *OrExpr) String() string {
+ return orArg(x.X) + " || " + orArg(x.Y)
+}
+
+func orArg(x Expr) string {
+ s := x.String()
+ if _, ok := x.(*AndExpr); ok {
+ s = "(" + s + ")"
+ }
+ return s
+}
+
+func or(x, y Expr) Expr {
+ return &OrExpr{x, y}
+}
+
+// A SyntaxError reports a syntax error in a parsed build expression.
+type SyntaxError struct {
+ Offset int // byte offset in input where error was detected
+ Err string // description of error
+}
+
+func (e *SyntaxError) Error() string {
+ return e.Err
+}
+
+var errNotConstraint = errors.New("not a build constraint")
+
+// Parse parses a single build constraint line of the form “//go:build ...” or “// +build ...”
+// and returns the corresponding boolean expression.
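+//
+// For example (an illustrative usage sketch):
+//
+//	expr, err := Parse("//go:build linux && !cgo")
+//	if err != nil {
+//		// handle the error
+//	}
+//	matched := expr.Eval(func(tag string) bool { return tag == "linux" })
+//	// matched is true: linux is satisfied and cgo is not.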
+func Parse(line string) (Expr, error) {
+ if text, ok := splitGoBuild(line); ok {
+ return parseExpr(text)
+ }
+ if text, ok := splitPlusBuild(line); ok {
+ return parsePlusBuildExpr(text), nil
+ }
+ return nil, errNotConstraint
+}
+
+// IsGoBuild reports whether the line of text is a “//go:build” constraint.
+// It only checks the prefix of the text, not that the expression itself parses.
+func IsGoBuild(line string) bool {
+ _, ok := splitGoBuild(line)
+ return ok
+}
+
+// splitGoBuild splits apart the leading //go:build prefix in line from the build expression itself.
+// It returns "", false if the input is not a //go:build line or if the input contains multiple lines.
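+//
+// For example, splitGoBuild("//go:build linux && amd64\n") returns
+// "linux && amd64", true.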
+func splitGoBuild(line string) (expr string, ok bool) {
+ // A single trailing newline is OK; otherwise multiple lines are not.
+ if len(line) > 0 && line[len(line)-1] == '\n' {
+ line = line[:len(line)-1]
+ }
+ if strings.Contains(line, "\n") {
+ return "", false
+ }
+
+ if !strings.HasPrefix(line, "//go:build") {
+ return "", false
+ }
+
+ line = strings.TrimSpace(line)
+ line = line[len("//go:build"):]
+
+ // If strings.TrimSpace finds more to trim after removing the //go:build prefix,
+ // it means that the prefix was followed by a space, making this a //go:build line
+ // (as opposed to a //go:buildsomethingelse line).
+ // If line is empty, we had "//go:build" by itself, which also counts.
+ trim := strings.TrimSpace(line)
+ if len(line) == len(trim) && line != "" {
+ return "", false
+ }
+
+ return trim, true
+}
+
+// An exprParser holds state for parsing a build expression.
+type exprParser struct {
+ s string // input string
+ i int // next read location in s
+
+ tok string // last token read
+ isTag bool
+ pos int // position (start) of last token
+}
+
+// parseExpr parses a boolean build tag expression.
+func parseExpr(text string) (x Expr, err error) {
+ defer func() {
+ if e := recover(); e != nil {
+ if e, ok := e.(*SyntaxError); ok {
+ err = e
+ return
+ }
+ panic(e) // unreachable unless parser has a bug
+ }
+ }()
+
+ p := &exprParser{s: text}
+ x = p.or()
+ if p.tok != "" {
+ panic(&SyntaxError{Offset: p.pos, Err: "unexpected token " + p.tok})
+ }
+ return x, nil
+}
+
+// or parses a sequence of || expressions.
+// On entry, the next input token has not yet been lexed.
+// On exit, the next input token has been lexed and is in p.tok.
+func (p *exprParser) or() Expr {
+ x := p.and()
+ for p.tok == "||" {
+ x = or(x, p.and())
+ }
+ return x
+}
+
+// and parses a sequence of && expressions.
+// On entry, the next input token has not yet been lexed.
+// On exit, the next input token has been lexed and is in p.tok.
+func (p *exprParser) and() Expr {
+ x := p.not()
+ for p.tok == "&&" {
+ x = and(x, p.not())
+ }
+ return x
+}
+
+// not parses a ! expression.
+// On entry, the next input token has not yet been lexed.
+// On exit, the next input token has been lexed and is in p.tok.
+func (p *exprParser) not() Expr {
+ p.lex()
+ if p.tok == "!" {
+ p.lex()
+ if p.tok == "!" {
+ panic(&SyntaxError{Offset: p.pos, Err: "double negation not allowed"})
+ }
+ return not(p.atom())
+ }
+ return p.atom()
+}
+
+// atom parses a tag or a parenthesized expression.
+// On entry, the next input token HAS been lexed.
+// On exit, the next input token has been lexed and is in p.tok.
+func (p *exprParser) atom() Expr {
+ // first token already in p.tok
+ if p.tok == "(" {
+ pos := p.pos
+ defer func() {
+ if e := recover(); e != nil {
+ if e, ok := e.(*SyntaxError); ok && e.Err == "unexpected end of expression" {
+ e.Err = "missing close paren"
+ }
+ panic(e)
+ }
+ }()
+ x := p.or()
+ if p.tok != ")" {
+ panic(&SyntaxError{Offset: pos, Err: "missing close paren"})
+ }
+ p.lex()
+ return x
+ }
+
+ if !p.isTag {
+ if p.tok == "" {
+ panic(&SyntaxError{Offset: p.pos, Err: "unexpected end of expression"})
+ }
+ panic(&SyntaxError{Offset: p.pos, Err: "unexpected token " + p.tok})
+ }
+ tok := p.tok
+ p.lex()
+ return tag(tok)
+}
+
+// lex finds and consumes the next token in the input stream.
+// On return, p.tok is set to the token text,
+// p.isTag reports whether the token was a tag,
+// and p.pos records the byte offset of the start of the token in the input stream.
+// If lex reaches the end of the input, p.tok is set to the empty string.
+// For any other syntax error, lex panics with a SyntaxError.
+func (p *exprParser) lex() {
+ p.isTag = false
+ for p.i < len(p.s) && (p.s[p.i] == ' ' || p.s[p.i] == '\t') {
+ p.i++
+ }
+ if p.i >= len(p.s) {
+ p.tok = ""
+ p.pos = p.i
+ return
+ }
+ switch p.s[p.i] {
+ case '(', ')', '!':
+ p.pos = p.i
+ p.i++
+ p.tok = p.s[p.pos:p.i]
+ return
+
+ case '&', '|':
+ if p.i+1 >= len(p.s) || p.s[p.i+1] != p.s[p.i] {
+ panic(&SyntaxError{Offset: p.i, Err: "invalid syntax at " + string(rune(p.s[p.i]))})
+ }
+ p.pos = p.i
+ p.i += 2
+ p.tok = p.s[p.pos:p.i]
+ return
+ }
+
+ tag := p.s[p.i:]
+ for i, c := range tag {
+ if !unicode.IsLetter(c) && !unicode.IsDigit(c) && c != '_' && c != '.' {
+ tag = tag[:i]
+ break
+ }
+ }
+ if tag == "" {
+ c, _ := utf8.DecodeRuneInString(p.s[p.i:])
+ panic(&SyntaxError{Offset: p.i, Err: "invalid syntax at " + string(c)})
+ }
+
+ p.pos = p.i
+ p.i += len(tag)
+ p.tok = p.s[p.pos:p.i]
+ p.isTag = true
+}
+
+// IsPlusBuild reports whether the line of text is a “// +build” constraint.
+// It only checks the prefix of the text, not that the expression itself parses.
+func IsPlusBuild(line string) bool {
+ _, ok := splitPlusBuild(line)
+ return ok
+}
+
+// splitPlusBuild splits apart the leading // +build prefix in line from the build expression itself.
+// It returns "", false if the input is not a // +build line or if the input contains multiple lines.
+func splitPlusBuild(line string) (expr string, ok bool) {
+ // A single trailing newline is OK; otherwise multiple lines are not.
+ if len(line) > 0 && line[len(line)-1] == '\n' {
+ line = line[:len(line)-1]
+ }
+ if strings.Contains(line, "\n") {
+ return "", false
+ }
+
+ if !strings.HasPrefix(line, "//") {
+ return "", false
+ }
+ line = line[len("//"):]
+ // Note the space is optional; "//+build" is recognized too.
+ line = strings.TrimSpace(line)
+
+ if !strings.HasPrefix(line, "+build") {
+ return "", false
+ }
+ line = line[len("+build"):]
+
+ // If strings.TrimSpace finds more to trim after removing the +build prefix,
+ // it means that the prefix was followed by a space, making this a +build line
+ // (as opposed to a +buildsomethingelse line).
+ // If line is empty, we had "// +build" by itself, which also counts.
+ trim := strings.TrimSpace(line)
+ if len(line) == len(trim) && line != "" {
+ return "", false
+ }
+
+ return trim, true
+}
+
+// parsePlusBuildExpr parses a legacy build tag expression (as used with “// +build”).
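+//
+// For example, "linux,386 darwin,!cgo" parses as
+// (linux && 386) || (darwin && !cgo): space-separated clauses are ORed
+// together, and comma-separated terms within a clause are ANDed.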
+func parsePlusBuildExpr(text string) Expr {
+ var x Expr
+ for _, clause := range strings.Fields(text) {
+ var y Expr
+ for _, lit := range strings.Split(clause, ",") {
+ var z Expr
+ var neg bool
+ if strings.HasPrefix(lit, "!!") || lit == "!" {
+ z = tag("ignore")
+ } else {
+ if strings.HasPrefix(lit, "!") {
+ neg = true
+ lit = lit[len("!"):]
+ }
+ if isValidTag(lit) {
+ z = tag(lit)
+ } else {
+ z = tag("ignore")
+ }
+ if neg {
+ z = not(z)
+ }
+ }
+ if y == nil {
+ y = z
+ } else {
+ y = and(y, z)
+ }
+ }
+ if x == nil {
+ x = y
+ } else {
+ x = or(x, y)
+ }
+ }
+ if x == nil {
+ x = tag("ignore")
+ }
+ return x
+}
+
+// isValidTag reports whether the word is a valid build tag.
+// Tags must be letters, digits, underscores or dots.
+// Unlike in Go identifiers, all digits are fine (e.g., "386").
+func isValidTag(word string) bool {
+ if word == "" {
+ return false
+ }
+ for _, c := range word {
+ if !unicode.IsLetter(c) && !unicode.IsDigit(c) && c != '_' && c != '.' {
+ return false
+ }
+ }
+ return true
+}
+
+var errComplex = errors.New("expression too complex for // +build lines")
+
+// PlusBuildLines returns a sequence of “// +build” lines that evaluate to the build expression x.
+// If the expression is too complex to convert directly to “// +build” lines, PlusBuildLines returns an error.
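+//
+// For example (mirroring plusBuildLinesTests in expr_test.go), the expression
+// x && (y || z) converts to the two lines "// +build x" and "// +build y z".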
+func PlusBuildLines(x Expr) ([]string, error) {
+ // Push all NOTs to the expression leaves, so that //go:build !(x && y) can be treated as !x || !y.
+ // This rewrite is both efficient and commonly needed, so it's worth doing.
+ // Essentially all other possible rewrites are too expensive and too rarely needed.
+ x = pushNot(x, false)
+
+ // Split into AND of ORs of ANDs of literals (tag or NOT tag).
+ var split [][][]Expr
+ for _, or := range appendSplitAnd(nil, x) {
+ var ands [][]Expr
+ for _, and := range appendSplitOr(nil, or) {
+ var lits []Expr
+ for _, lit := range appendSplitAnd(nil, and) {
+ switch lit.(type) {
+ case *TagExpr, *NotExpr:
+ lits = append(lits, lit)
+ default:
+ return nil, errComplex
+ }
+ }
+ ands = append(ands, lits)
+ }
+ split = append(split, ands)
+ }
+
+ // If all the ORs have length 1 (no actual OR'ing going on),
+ // push the top-level ANDs to the bottom level, so that we get
+ // one // +build line instead of many.
+ maxOr := 0
+ for _, or := range split {
+ if maxOr < len(or) {
+ maxOr = len(or)
+ }
+ }
+ if maxOr == 1 {
+ var lits []Expr
+ for _, or := range split {
+ lits = append(lits, or[0]...)
+ }
+ split = [][][]Expr{{lits}}
+ }
+
+ // Prepare the +build lines.
+ var lines []string
+ for _, or := range split {
+ line := "// +build"
+ for _, and := range or {
+ clause := ""
+ for i, lit := range and {
+ if i > 0 {
+ clause += ","
+ }
+ clause += lit.String()
+ }
+ line += " " + clause
+ }
+ lines = append(lines, line)
+ }
+
+ return lines, nil
+}
+
+// pushNot applies DeMorgan's law to push negations down the expression,
+// so that only tags are negated in the result.
+// (It applies the rewrites !(X && Y) => (!X || !Y) and !(X || Y) => (!X && !Y).)
+func pushNot(x Expr, not bool) Expr {
+ switch x := x.(type) {
+ default:
+ // unreachable
+ return x
+ case *NotExpr:
+ if _, ok := x.X.(*TagExpr); ok && !not {
+ return x
+ }
+ return pushNot(x.X, !not)
+ case *TagExpr:
+ if not {
+ return &NotExpr{X: x}
+ }
+ return x
+ case *AndExpr:
+ x1 := pushNot(x.X, not)
+ y1 := pushNot(x.Y, not)
+ if not {
+ return or(x1, y1)
+ }
+ if x1 == x.X && y1 == x.Y {
+ return x
+ }
+ return and(x1, y1)
+ case *OrExpr:
+ x1 := pushNot(x.X, not)
+ y1 := pushNot(x.Y, not)
+ if not {
+ return and(x1, y1)
+ }
+ if x1 == x.X && y1 == x.Y {
+ return x
+ }
+ return or(x1, y1)
+ }
+}
+
+// appendSplitAnd appends x to list while splitting apart any top-level && expressions.
+// For example, appendSplitAnd({W}, X && Y && Z) = {W, X, Y, Z}.
+func appendSplitAnd(list []Expr, x Expr) []Expr {
+ if x, ok := x.(*AndExpr); ok {
+ list = appendSplitAnd(list, x.X)
+ list = appendSplitAnd(list, x.Y)
+ return list
+ }
+ return append(list, x)
+}
+
+// appendSplitOr appends x to list while splitting apart any top-level || expressions.
+// For example, appendSplitOr({W}, X || Y || Z) = {W, X, Y, Z}.
+func appendSplitOr(list []Expr, x Expr) []Expr {
+ if x, ok := x.(*OrExpr); ok {
+ list = appendSplitOr(list, x.X)
+ list = appendSplitOr(list, x.Y)
+ return list
+ }
+ return append(list, x)
+}
diff --git a/src/go/build/constraint/expr_test.go b/src/go/build/constraint/expr_test.go
new file mode 100644
index 0000000..15d1890
--- /dev/null
+++ b/src/go/build/constraint/expr_test.go
@@ -0,0 +1,321 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package constraint
+
+import (
+ "fmt"
+ "reflect"
+ "strings"
+ "testing"
+)
+
+var exprStringTests = []struct {
+ x Expr
+ out string
+}{
+ {
+ x: tag("abc"),
+ out: "abc",
+ },
+ {
+ x: not(tag("abc")),
+ out: "!abc",
+ },
+ {
+ x: not(and(tag("abc"), tag("def"))),
+ out: "!(abc && def)",
+ },
+ {
+ x: and(tag("abc"), or(tag("def"), tag("ghi"))),
+ out: "abc && (def || ghi)",
+ },
+ {
+ x: or(and(tag("abc"), tag("def")), tag("ghi")),
+ out: "(abc && def) || ghi",
+ },
+}
+
+func TestExprString(t *testing.T) {
+ for i, tt := range exprStringTests {
+ t.Run(fmt.Sprint(i), func(t *testing.T) {
+ s := tt.x.String()
+ if s != tt.out {
+ t.Errorf("String() mismatch:\nhave %s\nwant %s", s, tt.out)
+ }
+ })
+ }
+}
+
+var lexTests = []struct {
+ in string
+ out string
+}{
+ {"", ""},
+ {"x", "x"},
+ {"x.y", "x.y"},
+ {"x_y", "x_y"},
+ {"αx", "αx"},
+ {"αx²", "αx err: invalid syntax at ²"},
+ {"go1.2", "go1.2"},
+ {"x y", "x y"},
+ {"x!y", "x ! y"},
+ {"&&||!()xy yx ", "&& || ! ( ) xy yx"},
+ {"x~", "x err: invalid syntax at ~"},
+ {"x ~", "x err: invalid syntax at ~"},
+ {"x &", "x err: invalid syntax at &"},
+ {"x &y", "x err: invalid syntax at &"},
+}
+
+func TestLex(t *testing.T) {
+ for i, tt := range lexTests {
+ t.Run(fmt.Sprint(i), func(t *testing.T) {
+ p := &exprParser{s: tt.in}
+ out := ""
+ for {
+ tok, err := lexHelp(p)
+ if tok == "" && err == nil {
+ break
+ }
+ if out != "" {
+ out += " "
+ }
+ if err != nil {
+ out += "err: " + err.Error()
+ break
+ }
+ out += tok
+ }
+ if out != tt.out {
+ t.Errorf("lex(%q):\nhave %s\nwant %s", tt.in, out, tt.out)
+ }
+ })
+ }
+}
+
+func lexHelp(p *exprParser) (tok string, err error) {
+ defer func() {
+ if e := recover(); e != nil {
+ if e, ok := e.(*SyntaxError); ok {
+ err = e
+ return
+ }
+ panic(e)
+ }
+ }()
+
+ p.lex()
+ return p.tok, nil
+}
+
+var parseExprTests = []struct {
+ in string
+ x Expr
+}{
+ {"x", tag("x")},
+ {"x&&y", and(tag("x"), tag("y"))},
+ {"x||y", or(tag("x"), tag("y"))},
+ {"(x)", tag("x")},
+ {"x||y&&z", or(tag("x"), and(tag("y"), tag("z")))},
+ {"x&&y||z", or(and(tag("x"), tag("y")), tag("z"))},
+ {"x&&(y||z)", and(tag("x"), or(tag("y"), tag("z")))},
+ {"(x||y)&&z", and(or(tag("x"), tag("y")), tag("z"))},
+ {"!(x&&y)", not(and(tag("x"), tag("y")))},
+}
+
+func TestParseExpr(t *testing.T) {
+ for i, tt := range parseExprTests {
+ t.Run(fmt.Sprint(i), func(t *testing.T) {
+ x, err := parseExpr(tt.in)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if x.String() != tt.x.String() {
+ t.Errorf("parseExpr(%q):\nhave %s\nwant %s", tt.in, x, tt.x)
+ }
+ })
+ }
+}
+
+var parseExprErrorTests = []struct {
+ in string
+ err error
+}{
+ {"x && ", &SyntaxError{Offset: 5, Err: "unexpected end of expression"}},
+ {"x && (", &SyntaxError{Offset: 6, Err: "missing close paren"}},
+ {"x && ||", &SyntaxError{Offset: 5, Err: "unexpected token ||"}},
+ {"x && !", &SyntaxError{Offset: 6, Err: "unexpected end of expression"}},
+ {"x && !!", &SyntaxError{Offset: 6, Err: "double negation not allowed"}},
+ {"x !", &SyntaxError{Offset: 2, Err: "unexpected token !"}},
+ {"x && (y", &SyntaxError{Offset: 5, Err: "missing close paren"}},
+}
+
+func TestParseError(t *testing.T) {
+ for i, tt := range parseExprErrorTests {
+ t.Run(fmt.Sprint(i), func(t *testing.T) {
+ x, err := parseExpr(tt.in)
+ if err == nil {
+ t.Fatalf("parseExpr(%q) = %v, want error", tt.in, x)
+ }
+ if !reflect.DeepEqual(err, tt.err) {
+ t.Fatalf("parseExpr(%q): wrong error:\nhave %#v\nwant %#v", tt.in, err, tt.err)
+ }
+ })
+ }
+}
+
+var exprEvalTests = []struct {
+ in string
+ ok bool
+ tags string
+}{
+ {"x", false, "x"},
+ {"x && y", false, "x y"},
+ {"x || y", false, "x y"},
+ {"!x && yes", true, "x yes"},
+ {"yes || y", true, "y yes"},
+}
+
+func TestExprEval(t *testing.T) {
+ for i, tt := range exprEvalTests {
+ t.Run(fmt.Sprint(i), func(t *testing.T) {
+ x, err := parseExpr(tt.in)
+ if err != nil {
+ t.Fatal(err)
+ }
+ tags := make(map[string]bool)
+ wantTags := make(map[string]bool)
+ for _, tag := range strings.Fields(tt.tags) {
+ wantTags[tag] = true
+ }
+ hasTag := func(tag string) bool {
+ tags[tag] = true
+ return tag == "yes"
+ }
+ ok := x.Eval(hasTag)
+ if ok != tt.ok || !reflect.DeepEqual(tags, wantTags) {
+ t.Errorf("Eval(%#q):\nhave ok=%v, tags=%v\nwant ok=%v, tags=%v",
+ tt.in, ok, tags, tt.ok, wantTags)
+ }
+ })
+ }
+}
+
+var parsePlusBuildExprTests = []struct {
+ in string
+ x Expr
+}{
+ {"x", tag("x")},
+ {"x,y", and(tag("x"), tag("y"))},
+ {"x y", or(tag("x"), tag("y"))},
+ {"x y,z", or(tag("x"), and(tag("y"), tag("z")))},
+ {"x,y z", or(and(tag("x"), tag("y")), tag("z"))},
+ {"x,!y !z", or(and(tag("x"), not(tag("y"))), not(tag("z")))},
+ {"!! x", or(tag("ignore"), tag("x"))},
+ {"!!x", tag("ignore")},
+ {"!x", not(tag("x"))},
+ {"!", tag("ignore")},
+ {"", tag("ignore")},
+}
+
+func TestParsePlusBuildExpr(t *testing.T) {
+ for i, tt := range parsePlusBuildExprTests {
+ t.Run(fmt.Sprint(i), func(t *testing.T) {
+ x := parsePlusBuildExpr(tt.in)
+ if x.String() != tt.x.String() {
+ t.Errorf("parsePlusBuildExpr(%q):\nhave %v\nwant %v", tt.in, x, tt.x)
+ }
+ })
+ }
+}
+
+var constraintTests = []struct {
+ in string
+ x Expr
+ err string
+}{
+ {"//+build !", tag("ignore"), ""},
+ {"//+build", tag("ignore"), ""},
+ {"//+build x y", or(tag("x"), tag("y")), ""},
+ {"// +build x y \n", or(tag("x"), tag("y")), ""},
+ {"// +build x y \n ", nil, "not a build constraint"},
+ {"// +build x y \nmore", nil, "not a build constraint"},
+ {" //+build x y", nil, "not a build constraint"},
+
+ {"//go:build x && y", and(tag("x"), tag("y")), ""},
+ {"//go:build x && y\n", and(tag("x"), tag("y")), ""},
+ {"//go:build x && y\n ", nil, "not a build constraint"},
+ {"//go:build x && y\nmore", nil, "not a build constraint"},
+ {" //go:build x && y", nil, "not a build constraint"},
+ {"//go:build\n", nil, "unexpected end of expression"},
+}
+
+func TestParse(t *testing.T) {
+ for i, tt := range constraintTests {
+ t.Run(fmt.Sprint(i), func(t *testing.T) {
+ x, err := Parse(tt.in)
+ if err != nil {
+ if tt.err == "" {
+ t.Errorf("Constraint(%q): unexpected error: %v", tt.in, err)
+ } else if !strings.Contains(err.Error(), tt.err) {
+ t.Errorf("Constraint(%q): error %v, want %v", tt.in, err, tt.err)
+ }
+ return
+ }
+ if tt.err != "" {
+ t.Errorf("Constraint(%q) = %v, want error %v", tt.in, x, tt.err)
+ return
+ }
+ if x.String() != tt.x.String() {
+ t.Errorf("Constraint(%q):\nhave %v\nwant %v", tt.in, x, tt.x)
+ }
+ })
+ }
+}
+
+var plusBuildLinesTests = []struct {
+ in string
+ out []string
+ err error
+}{
+ {"x", []string{"x"}, nil},
+ {"x && !y", []string{"x,!y"}, nil},
+ {"x || y", []string{"x y"}, nil},
+ {"x && (y || z)", []string{"x", "y z"}, nil},
+ {"!(x && y)", []string{"!x !y"}, nil},
+ {"x || (y && z)", []string{"x y,z"}, nil},
+ {"w && (x || (y && z))", []string{"w", "x y,z"}, nil},
+ {"v || (w && (x || (y && z)))", nil, errComplex},
+}
+
+func TestPlusBuildLines(t *testing.T) {
+ for i, tt := range plusBuildLinesTests {
+ t.Run(fmt.Sprint(i), func(t *testing.T) {
+ x, err := parseExpr(tt.in)
+ if err != nil {
+ t.Fatal(err)
+ }
+ lines, err := PlusBuildLines(x)
+ if err != nil {
+ if tt.err == nil {
+ t.Errorf("PlusBuildLines(%q): unexpected error: %v", tt.in, err)
+ } else if tt.err != err {
+ t.Errorf("PlusBuildLines(%q): error %v, want %v", tt.in, err, tt.err)
+ }
+ return
+ }
+ if tt.err != nil {
+ t.Errorf("PlusBuildLines(%q) = %v, want error %v", tt.in, lines, tt.err)
+ return
+ }
+ var want []string
+ for _, line := range tt.out {
+ want = append(want, "// +build "+line)
+ }
+ if !reflect.DeepEqual(lines, want) {
+ t.Errorf("PlusBuildLines(%q):\nhave %q\nwant %q", tt.in, lines, want)
+ }
+ })
+ }
+}
diff --git a/src/go/build/deps_test.go b/src/go/build/deps_test.go
new file mode 100644
index 0000000..1ec6ee7
--- /dev/null
+++ b/src/go/build/deps_test.go
@@ -0,0 +1,774 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file exercises the import parser but also checks that
+// some low-level packages do not have new dependencies added.
+
+package build
+
+import (
+ "bytes"
+ "fmt"
+ "go/token"
+ "internal/dag"
+ "internal/testenv"
+ "io/fs"
+ "os"
+ "path/filepath"
+ "runtime"
+ "sort"
+ "strings"
+ "testing"
+)
+
+// depsRules defines the expected dependencies between packages in
+// the Go source tree. It is a statement of policy.
+//
+// DO NOT CHANGE THIS DATA TO FIX BUILDS.
+// Existing packages should not have their constraints relaxed
+// without prior discussion.
+// Negative assertions should almost never be removed.
+//
+// "a < b" means package b can import package a.
+//
+// See `go doc internal/dag' for the full syntax.
+//
+// All-caps names are pseudo-names for specific points
+// in the dependency lattice.
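+//
+// For example, the rule "RUNTIME < io;" below means package io may import
+// the RUNTIME group, and "syscall !< io;" asserts that io must never import syscall.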
+var depsRules = `
+ # No dependencies allowed for any of these packages.
+ NONE
+ < constraints, container/list, container/ring,
+ internal/cfg, internal/coverage, internal/coverage/rtcov,
+ internal/coverage/uleb128, internal/coverage/calloc,
+ internal/cpu, internal/goarch,
+ internal/goexperiment, internal/goos,
+ internal/goversion, internal/nettrace, internal/platform,
+ unicode/utf8, unicode/utf16, unicode,
+ unsafe;
+
+ # These packages depend only on internal/goarch and unsafe.
+ internal/goarch, unsafe
+ < internal/abi;
+
+ # RUNTIME is the core runtime group of packages, all of them very light-weight.
+ internal/abi, internal/cpu, internal/goarch,
+ internal/coverage/rtcov, internal/goexperiment,
+ internal/goos, unsafe
+ < internal/bytealg
+ < internal/itoa
+ < internal/unsafeheader
+ < runtime/internal/sys
+ < runtime/internal/syscall
+ < runtime/internal/atomic
+ < runtime/internal/math
+ < runtime
+ < sync/atomic
+ < internal/race
+ < sync
+ < internal/godebug
+ < internal/reflectlite
+ < errors
+ < internal/oserror, math/bits
+ < RUNTIME;
+
+ RUNTIME
+ < sort
+ < container/heap;
+
+ RUNTIME
+ < io;
+
+ RUNTIME
+ < arena;
+
+ syscall !< io;
+ reflect !< sort;
+
+ RUNTIME, unicode/utf8
+ < path;
+
+ unicode !< path;
+
+ # SYSCALL is RUNTIME plus the packages necessary for basic system calls.
+ RUNTIME, unicode/utf8, unicode/utf16
+ < internal/syscall/windows/sysdll, syscall/js
+ < syscall
+ < internal/syscall/unix, internal/syscall/windows, internal/syscall/windows/registry
+ < internal/syscall/execenv
+ < SYSCALL;
+
+ # TIME is SYSCALL plus the core packages about time, including context.
+ SYSCALL
+ < time/tzdata
+ < time
+ < context
+ < TIME;
+
+ TIME, io, path, sort
+ < io/fs;
+
+ # MATH is RUNTIME plus the basic math packages.
+ RUNTIME
+ < math
+ < MATH;
+
+ unicode !< math;
+
+ MATH
+ < math/cmplx;
+
+ MATH
+ < math/rand;
+
+ MATH
+ < runtime/metrics;
+
+ MATH, unicode/utf8
+ < strconv;
+
+ unicode !< strconv;
+
+ # STR is basic string and buffer manipulation.
+ RUNTIME, io, unicode/utf8, unicode/utf16, unicode
+ < bytes, strings
+ < bufio;
+
+ bufio, path, strconv
+ < STR;
+
+ # OS is basic OS access, including helpers (path/filepath, os/exec, etc).
+ # OS includes string routines, but those must be layered above package os.
+ # OS does not include reflection.
+ io/fs
+ < internal/testlog
+ < internal/poll
+ < internal/safefilepath
+ < os
+ < os/signal;
+
+ io/fs
+ < embed;
+
+ unicode, fmt !< net, os, os/signal;
+
+ os/signal, internal/safefilepath, STR
+ < path/filepath
+ < io/ioutil;
+
+ path/filepath, internal/godebug < os/exec;
+
+ io/ioutil, os/exec, os/signal
+ < OS;
+
+ reflect !< OS;
+
+ OS
+ < golang.org/x/sys/cpu;
+
+ # FMT is OS (which includes string routines) plus reflect and fmt.
+ # It does not include package log, which should be avoided in core packages.
+ arena, strconv, unicode
+ < reflect;
+
+ os, reflect
+ < internal/fmtsort
+ < fmt;
+
+ OS, fmt
+ < FMT;
+
+ log !< FMT;
+
+ # Misc packages needing only FMT.
+ FMT
+ < html,
+ internal/dag,
+ internal/goroot,
+ internal/types/errors,
+ mime/quotedprintable,
+ net/internal/socktest,
+ net/url,
+ runtime/trace,
+ text/scanner,
+ text/tabwriter;
+
+ io, reflect
+ < internal/saferio;
+
+ # encodings
+ # core ones do not use fmt.
+ io, strconv
+ < encoding;
+
+ encoding, reflect
+ < encoding/binary
+ < encoding/base32, encoding/base64;
+
+ FMT, encoding < flag;
+
+ fmt !< encoding/base32, encoding/base64;
+
+ FMT, encoding/base32, encoding/base64, internal/saferio
+ < encoding/ascii85, encoding/csv, encoding/gob, encoding/hex,
+ encoding/json, encoding/pem, encoding/xml, mime;
+
+ # hashes
+ io
+ < hash
+ < hash/adler32, hash/crc32, hash/crc64, hash/fnv, hash/maphash;
+
+ # math/big
+ FMT, encoding/binary, math/rand
+ < math/big;
+
+ # compression
+ FMT, encoding/binary, hash/adler32, hash/crc32
+ < compress/bzip2, compress/flate, compress/lzw
+ < archive/zip, compress/gzip, compress/zlib;
+
+ # templates
+ FMT
+ < text/template/parse;
+
+ net/url, text/template/parse
+ < text/template
+ < internal/lazytemplate;
+
+ # regexp
+ FMT
+ < regexp/syntax
+ < regexp
+ < internal/lazyregexp;
+
+ encoding/json, html, text/template, regexp
+ < html/template;
+
+ # suffix array
+ encoding/binary, regexp
+ < index/suffixarray;
+
+ # executable parsing
+ FMT, encoding/binary, compress/zlib, internal/saferio
+ < runtime/debug
+ < debug/dwarf
+ < debug/elf, debug/gosym, debug/macho, debug/pe, debug/plan9obj, internal/xcoff
+ < debug/buildinfo
+ < DEBUG;
+
+ # go parser and friends.
+ FMT
+ < go/token
+ < go/scanner
+ < go/ast
+ < go/internal/typeparams
+ < go/parser;
+
+ FMT
+ < go/build/constraint, go/doc/comment;
+
+ go/build/constraint, go/doc/comment, go/parser, text/tabwriter
+ < go/printer
+ < go/format;
+
+ go/doc/comment, go/parser, internal/lazyregexp, text/template
+ < go/doc;
+
+ math/big, go/token
+ < go/constant;
+
+ container/heap, go/constant, go/parser, internal/types/errors, regexp
+ < go/types;
+
+ FMT, internal/goexperiment
+ < internal/buildcfg;
+
+ go/build/constraint, go/doc, go/parser, internal/buildcfg, internal/goroot, internal/goversion
+ < go/build;
+
+ # databases
+ FMT
+ < database/sql/internal
+ < database/sql/driver
+ < database/sql;
+
+ # images
+ FMT, compress/lzw, compress/zlib
+ < image/color
+ < image, image/color/palette
+ < image/internal/imageutil
+ < image/draw
+ < image/gif, image/jpeg, image/png;
+
+ # cgo, delayed as long as possible.
+ # If you add a dependency on CGO, you must add the package
+ # to cgoPackages in cmd/dist/test.go as well.
+ RUNTIME
+ < C
+ < runtime/cgo
+ < CGO
+ < runtime/msan, runtime/asan;
+
+ # runtime/race
+ NONE < runtime/race/internal/amd64v1;
+ NONE < runtime/race/internal/amd64v3;
+ CGO, runtime/race/internal/amd64v1, runtime/race/internal/amd64v3 < runtime/race;
+
+ # Bulk of the standard library must not use cgo.
+ # The prohibition stops at net and os/user.
+ C !< fmt, go/types, CRYPTO-MATH;
+
+ CGO, OS
+ < plugin;
+
+ CGO, FMT
+ < os/user
+ < archive/tar;
+
+ sync
+ < internal/singleflight;
+
+ os
+ < golang.org/x/net/dns/dnsmessage,
+ golang.org/x/net/lif,
+ golang.org/x/net/route;
+
+ os, runtime, strconv, sync, unsafe,
+ internal/godebug
+ < internal/intern;
+
+ internal/bytealg, internal/intern, internal/itoa, math/bits, sort, strconv
+ < net/netip;
+
+ # net is unavoidable when doing any networking,
+ # so large dependencies must be kept out.
+ # This is a long-looking list but most of these
+ # are small with few dependencies.
+ CGO,
+ golang.org/x/net/dns/dnsmessage,
+ golang.org/x/net/lif,
+ golang.org/x/net/route,
+ internal/godebug,
+ internal/nettrace,
+ internal/poll,
+ internal/singleflight,
+ internal/race,
+ net/netip,
+ os
+ < net;
+
+ fmt, unicode !< net;
+ math/rand !< net; # net uses runtime instead
+
+ # NET is net plus net-helper packages.
+ FMT, net
+ < net/textproto;
+
+ mime, net/textproto, net/url
+ < NET;
+
+	# logging - most packages should not import log; net/http and above may
+ FMT
+ < log;
+
+ log !< crypto/tls, database/sql, go/importer, testing;
+
+ FMT, log, net
+ < log/syslog;
+
+ NET, log
+ < net/mail;
+
+ NONE < crypto/internal/boring/sig, crypto/internal/boring/syso;
+ sync/atomic < crypto/internal/boring/bcache, crypto/internal/boring/fipstls;
+ crypto/internal/boring/sig, crypto/internal/boring/fipstls < crypto/tls/fipsonly;
+
+ # CRYPTO is core crypto algorithms - no cgo, fmt, net.
+ # Unfortunately, stuck with reflect via encoding/binary.
+ crypto/internal/boring/sig,
+ crypto/internal/boring/syso,
+ encoding/binary,
+ golang.org/x/sys/cpu,
+ hash, embed
+ < crypto
+ < crypto/subtle
+ < crypto/internal/alias
+ < crypto/cipher;
+
+ crypto/cipher,
+ crypto/internal/boring/bcache
+ < crypto/internal/boring
+ < crypto/boring;
+
+ crypto/internal/alias
+ < crypto/internal/randutil
+ < crypto/internal/nistec/fiat
+ < crypto/internal/nistec
+ < crypto/internal/edwards25519/field
+ < crypto/internal/edwards25519;
+
+ crypto/boring
+ < crypto/aes, crypto/des, crypto/hmac, crypto/md5, crypto/rc4,
+ crypto/sha1, crypto/sha256, crypto/sha512;
+
+ crypto/boring, crypto/internal/edwards25519/field
+ < crypto/ecdh;
+
+ crypto/aes,
+ crypto/des,
+ crypto/ecdh,
+ crypto/hmac,
+ crypto/internal/edwards25519,
+ crypto/md5,
+ crypto/rc4,
+ crypto/sha1,
+ crypto/sha256,
+ crypto/sha512
+ < CRYPTO;
+
+ CGO, fmt, net !< CRYPTO;
+
+ # CRYPTO-MATH is core bignum-based crypto - no cgo, net; fmt now ok.
+ CRYPTO, FMT, math/big
+ < crypto/internal/boring/bbig
+ < crypto/rand
+ < crypto/ed25519
+ < encoding/asn1
+ < golang.org/x/crypto/cryptobyte/asn1
+ < golang.org/x/crypto/cryptobyte
+ < crypto/internal/bigmod
+ < crypto/dsa, crypto/elliptic, crypto/rsa
+ < crypto/ecdsa
+ < CRYPTO-MATH;
+
+ CGO, net !< CRYPTO-MATH;
+
+ # TLS, Prince of Dependencies.
+ CRYPTO-MATH, NET, container/list, encoding/hex, encoding/pem
+ < golang.org/x/crypto/internal/alias
+ < golang.org/x/crypto/internal/subtle
+ < golang.org/x/crypto/chacha20
+ < golang.org/x/crypto/internal/poly1305
+ < golang.org/x/crypto/chacha20poly1305
+ < golang.org/x/crypto/hkdf
+ < crypto/x509/internal/macos
+ < crypto/x509/pkix;
+
+ crypto/internal/boring/fipstls, crypto/x509/pkix
+ < crypto/x509
+ < crypto/tls;
+
+ # crypto-aware packages
+
+ DEBUG, go/build, go/types, text/scanner, crypto/md5
+ < internal/pkgbits
+ < go/internal/gcimporter, go/internal/gccgoimporter, go/internal/srcimporter
+ < go/importer;
+
+ NET, crypto/rand, mime/quotedprintable
+ < mime/multipart;
+
+ crypto/tls
+ < net/smtp;
+
+ # HTTP, King of Dependencies.
+
+ FMT
+ < golang.org/x/net/http2/hpack
+ < net/http/internal, net/http/internal/ascii, net/http/internal/testcert;
+
+ FMT, NET, container/list, encoding/binary, log
+ < golang.org/x/text/transform
+ < golang.org/x/text/unicode/norm
+ < golang.org/x/text/unicode/bidi
+ < golang.org/x/text/secure/bidirule
+ < golang.org/x/net/idna
+ < golang.org/x/net/http/httpguts, golang.org/x/net/http/httpproxy;
+
+ NET, crypto/tls
+ < net/http/httptrace;
+
+ compress/gzip,
+ golang.org/x/net/http/httpguts,
+ golang.org/x/net/http/httpproxy,
+ golang.org/x/net/http2/hpack,
+ net/http/internal,
+ net/http/internal/ascii,
+ net/http/internal/testcert,
+ net/http/httptrace,
+ mime/multipart,
+ log
+ < net/http;
+
+ # HTTP-aware packages
+
+ encoding/json, net/http
+ < expvar;
+
+ net/http, net/http/internal/ascii
+ < net/http/cookiejar, net/http/httputil;
+
+ net/http, flag
+ < net/http/httptest;
+
+ net/http, regexp
+ < net/http/cgi
+ < net/http/fcgi;
+
+ # Profiling
+ FMT, compress/gzip, encoding/binary, text/tabwriter
+ < runtime/pprof;
+
+ OS, compress/gzip, regexp
+ < internal/profile;
+
+ html, internal/profile, net/http, runtime/pprof, runtime/trace
+ < net/http/pprof;
+
+ # RPC
+ encoding/gob, encoding/json, go/token, html/template, net/http
+ < net/rpc
+ < net/rpc/jsonrpc;
+
+ # System Information
+ internal/cpu, sync
+ < internal/sysinfo;
+
+ # Test-only
+ log
+ < testing/iotest
+ < testing/fstest;
+
+ FMT, flag, math/rand
+ < testing/quick;
+
+ FMT, DEBUG, flag, runtime/trace, internal/sysinfo, math/rand
+ < testing;
+
+ FMT, crypto/sha256, encoding/json, go/ast, go/parser, go/token,
+	internal/godebug, math/rand, encoding/hex
+ < internal/fuzz;
+
+ internal/fuzz, internal/testlog, runtime/pprof, regexp
+ < testing/internal/testdeps;
+
+ OS, flag, testing, internal/cfg, internal/platform, internal/goroot
+ < internal/testenv;
+
+ OS, encoding/base64
+ < internal/obscuretestdata;
+
+ CGO, OS, fmt
+ < internal/testpty;
+
+ NET, testing, math/rand
+ < golang.org/x/net/nettest;
+
+ syscall
+ < os/exec/internal/fdtest;
+
+ FMT, container/heap, math/rand
+ < internal/trace;
+
+ FMT
+ < internal/diff, internal/txtar;
+
+ FMT, crypto/md5, encoding/binary, regexp, sort, text/tabwriter, unsafe,
+ internal/coverage, internal/coverage/uleb128
+ < internal/coverage/cmerge,
+ internal/coverage/pods,
+ internal/coverage/slicereader,
+ internal/coverage/slicewriter;
+
+ internal/coverage/slicereader, internal/coverage/slicewriter
+ < internal/coverage/stringtab
+ < internal/coverage/decodecounter, internal/coverage/decodemeta,
+ internal/coverage/encodecounter, internal/coverage/encodemeta;
+
+ internal/coverage/cmerge
+ < internal/coverage/cformat;
+
+ runtime/debug,
+ internal/coverage/calloc,
+ internal/coverage/cformat,
+ internal/coverage/decodecounter, internal/coverage/decodemeta,
+ internal/coverage/encodecounter, internal/coverage/encodemeta,
+ internal/coverage/pods
+ < runtime/coverage;
+`
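
As a rough illustration of this rule syntax, a toy rule set parses and answers edge queries as sketched below. The names a, b, and c are invented for the example, and internal/dag is an internal package, so a fragment like this only compiles from inside the standard library (for instance, in this test file, which already imports fmt and internal/dag):

	g, err := dag.Parse(`
	NONE < a;
	a < b
	< c;
	`)
	if err != nil {
		panic(err)
	}
	fmt.Println(g.HasEdge("b", "a")) // true: "a < b" allows b to import a
	fmt.Println(g.HasEdge("a", "b")) // false: a must not import b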
+
+// listStdPkgs returns the same list of packages as "go list std".
+func listStdPkgs(goroot string) ([]string, error) {
+ // Based on cmd/go's matchPackages function.
+ var pkgs []string
+
+ src := filepath.Join(goroot, "src") + string(filepath.Separator)
+ walkFn := func(path string, d fs.DirEntry, err error) error {
+ if err != nil || !d.IsDir() || path == src {
+ return nil
+ }
+
+ base := filepath.Base(path)
+ if strings.HasPrefix(base, ".") || strings.HasPrefix(base, "_") || base == "testdata" {
+ return filepath.SkipDir
+ }
+
+ name := filepath.ToSlash(path[len(src):])
+ if name == "builtin" || name == "cmd" {
+ return filepath.SkipDir
+ }
+
+ pkgs = append(pkgs, strings.TrimPrefix(name, "vendor/"))
+ return nil
+ }
+ if err := filepath.WalkDir(src, walkFn); err != nil {
+ return nil, err
+ }
+ return pkgs, nil
+}
+
+func TestDependencies(t *testing.T) {
+ if !testenv.HasSrc() {
+ // Tests run in a limited file system and we do not
+ // provide access to every source file.
+ t.Skipf("skipping on %s/%s, missing full GOROOT", runtime.GOOS, runtime.GOARCH)
+ }
+
+ ctxt := Default
+ all, err := listStdPkgs(ctxt.GOROOT)
+ if err != nil {
+ t.Fatal(err)
+ }
+ sort.Strings(all)
+
+ sawImport := map[string]map[string]bool{} // from package => to package => true
+ policy := depsPolicy(t)
+
+ for _, pkg := range all {
+ imports, err := findImports(pkg)
+ if err != nil {
+ t.Error(err)
+ continue
+ }
+ if sawImport[pkg] == nil {
+ sawImport[pkg] = map[string]bool{}
+ }
+ var bad []string
+ for _, imp := range imports {
+ sawImport[pkg][imp] = true
+ if !policy.HasEdge(pkg, imp) {
+ bad = append(bad, imp)
+ }
+ }
+ if bad != nil {
+ t.Errorf("unexpected dependency: %s imports %v", pkg, bad)
+ }
+ }
+}
+
+var buildIgnore = []byte("\n//go:build ignore")
+
+func findImports(pkg string) ([]string, error) {
+ vpkg := pkg
+ if strings.HasPrefix(pkg, "golang.org") {
+ vpkg = "vendor/" + pkg
+ }
+ dir := filepath.Join(Default.GOROOT, "src", vpkg)
+ files, err := os.ReadDir(dir)
+ if err != nil {
+ return nil, err
+ }
+ var imports []string
+ var haveImport = map[string]bool{}
+ if pkg == "crypto/internal/boring" {
+ haveImport["C"] = true // kludge: prevent C from appearing in crypto/internal/boring imports
+ }
+ fset := token.NewFileSet()
+ for _, file := range files {
+ name := file.Name()
+ if name == "slice_go14.go" || name == "slice_go18.go" {
+ // These files are for compiler bootstrap with older versions of Go and not built in the standard build.
+ continue
+ }
+ if !strings.HasSuffix(name, ".go") || strings.HasSuffix(name, "_test.go") {
+ continue
+ }
+ info := fileInfo{
+ name: filepath.Join(dir, name),
+ fset: fset,
+ }
+ f, err := os.Open(info.name)
+ if err != nil {
+ return nil, err
+ }
+ err = readGoInfo(f, &info)
+ f.Close()
+ if err != nil {
+ return nil, fmt.Errorf("reading %v: %v", name, err)
+ }
+ if info.parsed.Name.Name == "main" {
+ continue
+ }
+ if bytes.Contains(info.header, buildIgnore) {
+ continue
+ }
+ for _, imp := range info.imports {
+ path := imp.path
+ if !haveImport[path] {
+ haveImport[path] = true
+ imports = append(imports, path)
+ }
+ }
+ }
+ sort.Strings(imports)
+ return imports, nil
+}
+
+// depsPolicy returns a graph g such that g.HasEdge(p, d) reports whether p is allowed to import d.
+func depsPolicy(t *testing.T) *dag.Graph {
+ g, err := dag.Parse(depsRules)
+ if err != nil {
+ t.Fatal(err)
+ }
+ return g
+}
+
+// TestStdlibLowercase tests that all standard library package names are
+// lowercase. See Issue 40065.
+func TestStdlibLowercase(t *testing.T) {
+ if !testenv.HasSrc() {
+ t.Skipf("skipping on %s/%s, missing full GOROOT", runtime.GOOS, runtime.GOARCH)
+ }
+
+ ctxt := Default
+ all, err := listStdPkgs(ctxt.GOROOT)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ for _, pkgname := range all {
+ if strings.ToLower(pkgname) != pkgname {
+ t.Errorf("package %q should not use upper-case path", pkgname)
+ }
+ }
+}
+
+// TestFindImports tests that findImports works. See #43249.
+func TestFindImports(t *testing.T) {
+ imports, err := findImports("go/build")
+ if err != nil {
+ t.Fatal(err)
+ }
+ t.Logf("go/build imports %q", imports)
+ want := []string{"bytes", "os", "path/filepath", "strings"}
+wantLoop:
+ for _, w := range want {
+ for _, imp := range imports {
+ if imp == w {
+ continue wantLoop
+ }
+ }
+ t.Errorf("expected to find %q in import list", w)
+ }
+}
diff --git a/src/go/build/doc.go b/src/go/build/doc.go
new file mode 100644
index 0000000..cd1d3fd
--- /dev/null
+++ b/src/go/build/doc.go
@@ -0,0 +1,98 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package build gathers information about Go packages.
+//
+// # Go Path
+//
+// The Go path is a list of directory trees containing Go source code.
+// It is consulted to resolve imports that cannot be found in the standard
+// Go tree. The default path is the value of the GOPATH environment
+// variable, interpreted as a path list appropriate to the operating system
+// (on Unix, the variable is a colon-separated string;
+// on Windows, a semicolon-separated string;
+// on Plan 9, a list).
+//
+// Each directory listed in the Go path must have a prescribed structure:
+//
+// The src/ directory holds source code. The path below 'src' determines
+// the import path or executable name.
+//
+// The pkg/ directory holds installed package objects.
+// As in the Go tree, each target operating system and
+// architecture pair has its own subdirectory of pkg
+// (pkg/GOOS_GOARCH).
+//
+// If DIR is a directory listed in the Go path, a package with
+// source in DIR/src/foo/bar can be imported as "foo/bar" and
+// has its compiled form installed to "DIR/pkg/GOOS_GOARCH/foo/bar.a"
+// (or, for gccgo, "DIR/pkg/gccgo/foo/libbar.a").
+//
+// The bin/ directory holds compiled commands.
+// Each command is named for its source directory, but only
+// using the final element, not the entire path. That is, the
+// command with source in DIR/src/foo/quux is installed into
+// DIR/bin/quux, not DIR/bin/foo/quux. The foo/ is stripped
+// so that you can add DIR/bin to your PATH to get at the
+// installed commands.
+//
+// Here's an example directory layout:
+//
+// GOPATH=/home/user/gocode
+//
+// /home/user/gocode/
+// src/
+// foo/
+// bar/ (go code in package bar)
+// x.go
+// quux/ (go code in package main)
+// y.go
+// bin/
+// quux (installed command)
+// pkg/
+// linux_amd64/
+// foo/
+// bar.a (installed package object)
+//
+// # Build Constraints
+//
+// A build constraint, also known as a build tag, is a condition under which a
+// file should be included in the package. Build constraints are given by a
+// line comment that begins
+//
+// //go:build
+//
+// Build constraints may also be part of a file's name
+// (for example, source_windows.go will only be included if the target
+// operating system is windows).
+//
+// See 'go help buildconstraint'
+// (https://golang.org/cmd/go/#hdr-Build_constraints) for details.
+//
+// # Binary-Only Packages
+//
+// In Go 1.12 and earlier, it was possible to distribute packages in binary
+// form without including the source code used for compiling the package.
+// The package was distributed with a source file not excluded by build
+// constraints and containing a "//go:binary-only-package" comment. Like a
+// build constraint, this comment appeared at the top of a file, preceded
+// only by blank lines and other line comments and with a blank line
+// following the comment, to separate it from the package documentation.
+// Unlike build constraints, this comment is only recognized in non-test
+// Go source files.
+//
+// The minimal source code for a binary-only package was therefore:
+//
+// //go:binary-only-package
+//
+// package mypkg
+//
+// The source code could include additional Go code. That code was never
+// compiled but would be processed by tools like godoc and might be useful
+// as end-user documentation.
+//
+// "go build" and other commands no longer support binary-only-packages.
+// Import and ImportDir will still set the BinaryOnly flag in packages
+// containing these comments for use in tools and error messages.
+package build
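
To connect the layout above to the API, the following sketch resolves an import path against a GOPATH tree. The /home/user/gocode path and the foo/bar package are simply the example values from the layout; the call only succeeds in a GOPATH-mode setup where that directory actually exists:

	package main

	import (
		"fmt"
		"go/build"
	)

	func main() {
		ctxt := build.Default
		ctxt.GOPATH = "/home/user/gocode"

		// Resolve the import path "foo/bar" to a directory and its Go files.
		pkg, err := ctxt.Import("foo/bar", "", 0)
		if err != nil {
			panic(err)
		}
		fmt.Println(pkg.Dir)     // /home/user/gocode/src/foo/bar
		fmt.Println(pkg.GoFiles) // e.g. [x.go]
	}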
diff --git a/src/go/build/gc.go b/src/go/build/gc.go
new file mode 100644
index 0000000..434991f
--- /dev/null
+++ b/src/go/build/gc.go
@@ -0,0 +1,17 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build gc
+
+package build
+
+import (
+ "path/filepath"
+ "runtime"
+)
+
+// getToolDir returns the default value of ToolDir.
+func getToolDir() string {
+ return filepath.Join(runtime.GOROOT(), "pkg/tool/"+runtime.GOOS+"_"+runtime.GOARCH)
+}
diff --git a/src/go/build/gccgo.go b/src/go/build/gccgo.go
new file mode 100644
index 0000000..f806729
--- /dev/null
+++ b/src/go/build/gccgo.go
@@ -0,0 +1,14 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build gccgo
+
+package build
+
+import "runtime"
+
+// getToolDir returns the default value of ToolDir.
+func getToolDir() string {
+ return envOr("GCCGOTOOLDIR", runtime.GCCGOTOOLDIR)
+}
diff --git a/src/go/build/read.go b/src/go/build/read.go
new file mode 100644
index 0000000..adcf82f
--- /dev/null
+++ b/src/go/build/read.go
@@ -0,0 +1,600 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package build
+
+import (
+ "bufio"
+ "bytes"
+ "errors"
+ "fmt"
+ "go/ast"
+ "go/parser"
+ "go/scanner"
+ "go/token"
+ "io"
+ "strconv"
+ "strings"
+ "unicode"
+ "unicode/utf8"
+)
+
+type importReader struct {
+ b *bufio.Reader
+ buf []byte
+ peek byte
+ err error
+ eof bool
+ nerr int
+ pos token.Position
+}
+
+var bom = []byte{0xef, 0xbb, 0xbf}
+
+func newImportReader(name string, r io.Reader) *importReader {
+ b := bufio.NewReader(r)
+ // Remove leading UTF-8 BOM.
+ // Per https://golang.org/ref/spec#Source_code_representation:
+ // a compiler may ignore a UTF-8-encoded byte order mark (U+FEFF)
+ // if it is the first Unicode code point in the source text.
+ if leadingBytes, err := b.Peek(3); err == nil && bytes.Equal(leadingBytes, bom) {
+ b.Discard(3)
+ }
+ return &importReader{
+ b: b,
+ pos: token.Position{
+ Filename: name,
+ Line: 1,
+ Column: 1,
+ },
+ }
+}
+
+func isIdent(c byte) bool {
+ return 'A' <= c && c <= 'Z' || 'a' <= c && c <= 'z' || '0' <= c && c <= '9' || c == '_' || c >= utf8.RuneSelf
+}
+
+var (
+ errSyntax = errors.New("syntax error")
+ errNUL = errors.New("unexpected NUL in input")
+)
+
+// syntaxError records a syntax error, but only if an I/O error has not already been recorded.
+func (r *importReader) syntaxError() {
+ if r.err == nil {
+ r.err = errSyntax
+ }
+}
+
+// readByte reads the next byte from the input, saves it in buf, and returns it.
+// If an error occurs, readByte records the error in r.err and returns 0.
+func (r *importReader) readByte() byte {
+ c, err := r.b.ReadByte()
+ if err == nil {
+ r.buf = append(r.buf, c)
+ if c == 0 {
+ err = errNUL
+ }
+ }
+ if err != nil {
+ if err == io.EOF {
+ r.eof = true
+ } else if r.err == nil {
+ r.err = err
+ }
+ c = 0
+ }
+ return c
+}
+
+// readByteNoBuf is like readByte but doesn't buffer the byte.
+// It exhausts r.buf before reading from r.b.
+func (r *importReader) readByteNoBuf() byte {
+ var c byte
+ var err error
+ if len(r.buf) > 0 {
+ c = r.buf[0]
+ r.buf = r.buf[1:]
+ } else {
+ c, err = r.b.ReadByte()
+ if err == nil && c == 0 {
+ err = errNUL
+ }
+ }
+
+ if err != nil {
+ if err == io.EOF {
+ r.eof = true
+ } else if r.err == nil {
+ r.err = err
+ }
+ return 0
+ }
+ r.pos.Offset++
+ if c == '\n' {
+ r.pos.Line++
+ r.pos.Column = 1
+ } else {
+ r.pos.Column++
+ }
+ return c
+}
+
+// peekByte returns the next byte from the input reader but does not advance beyond it.
+// If skipSpace is set, peekByte skips leading spaces and comments.
+func (r *importReader) peekByte(skipSpace bool) byte {
+ if r.err != nil {
+ if r.nerr++; r.nerr > 10000 {
+ panic("go/build: import reader looping")
+ }
+ return 0
+ }
+
+ // Use r.peek as first input byte.
+ // Don't just return r.peek here: it might have been left by peekByte(false)
+ // and this might be peekByte(true).
+ c := r.peek
+ if c == 0 {
+ c = r.readByte()
+ }
+ for r.err == nil && !r.eof {
+ if skipSpace {
+ // For the purposes of this reader, semicolons are never necessary to
+ // understand the input and are treated as spaces.
+ switch c {
+ case ' ', '\f', '\t', '\r', '\n', ';':
+ c = r.readByte()
+ continue
+
+ case '/':
+ c = r.readByte()
+ if c == '/' {
+ for c != '\n' && r.err == nil && !r.eof {
+ c = r.readByte()
+ }
+ } else if c == '*' {
+ var c1 byte
+ for (c != '*' || c1 != '/') && r.err == nil {
+ if r.eof {
+ r.syntaxError()
+ }
+ c, c1 = c1, r.readByte()
+ }
+ } else {
+ r.syntaxError()
+ }
+ c = r.readByte()
+ continue
+ }
+ }
+ break
+ }
+ r.peek = c
+ return r.peek
+}
+
+// nextByte is like peekByte but advances beyond the returned byte.
+func (r *importReader) nextByte(skipSpace bool) byte {
+ c := r.peekByte(skipSpace)
+ r.peek = 0
+ return c
+}
+
+var goEmbed = []byte("go:embed")
+
+// findEmbed advances the input reader to the next //go:embed comment.
+// It reports whether it found a comment.
+// (Otherwise it found an error or EOF.)
+func (r *importReader) findEmbed(first bool) bool {
+ // The import block scan stopped after a non-space character,
+ // so the reader is not at the start of a line on the first call.
+ // After that, each //go:embed extraction leaves the reader
+ // at the end of a line.
+ startLine := !first
+ var c byte
+ for r.err == nil && !r.eof {
+ c = r.readByteNoBuf()
+ Reswitch:
+ switch c {
+ default:
+ startLine = false
+
+ case '\n':
+ startLine = true
+
+ case ' ', '\t':
+ // leave startLine alone
+
+ case '"':
+ startLine = false
+ for r.err == nil {
+ if r.eof {
+ r.syntaxError()
+ }
+ c = r.readByteNoBuf()
+ if c == '\\' {
+ r.readByteNoBuf()
+ if r.err != nil {
+ r.syntaxError()
+ return false
+ }
+ continue
+ }
+ if c == '"' {
+ c = r.readByteNoBuf()
+ goto Reswitch
+ }
+ }
+ goto Reswitch
+
+ case '`':
+ startLine = false
+ for r.err == nil {
+ if r.eof {
+ r.syntaxError()
+ }
+ c = r.readByteNoBuf()
+ if c == '`' {
+ c = r.readByteNoBuf()
+ goto Reswitch
+ }
+ }
+
+ case '\'':
+ startLine = false
+ for r.err == nil {
+ if r.eof {
+ r.syntaxError()
+ }
+ c = r.readByteNoBuf()
+ if c == '\\' {
+ r.readByteNoBuf()
+ if r.err != nil {
+ r.syntaxError()
+ return false
+ }
+ continue
+ }
+ if c == '\'' {
+ c = r.readByteNoBuf()
+ goto Reswitch
+ }
+ }
+
+ case '/':
+ c = r.readByteNoBuf()
+ switch c {
+ default:
+ startLine = false
+ goto Reswitch
+
+ case '*':
+ var c1 byte
+ for (c != '*' || c1 != '/') && r.err == nil {
+ if r.eof {
+ r.syntaxError()
+ }
+ c, c1 = c1, r.readByteNoBuf()
+ }
+ startLine = false
+
+ case '/':
+ if startLine {
+ // Try to read this as a //go:embed comment.
+ for i := range goEmbed {
+ c = r.readByteNoBuf()
+ if c != goEmbed[i] {
+ goto SkipSlashSlash
+ }
+ }
+ c = r.readByteNoBuf()
+ if c == ' ' || c == '\t' {
+ // Found one!
+ return true
+ }
+ }
+ SkipSlashSlash:
+ for c != '\n' && r.err == nil && !r.eof {
+ c = r.readByteNoBuf()
+ }
+ startLine = true
+ }
+ }
+ }
+ return false
+}
+
+// readKeyword reads the given keyword from the input.
+// If the keyword is not present, readKeyword records a syntax error.
+func (r *importReader) readKeyword(kw string) {
+ r.peekByte(true)
+ for i := 0; i < len(kw); i++ {
+ if r.nextByte(false) != kw[i] {
+ r.syntaxError()
+ return
+ }
+ }
+ if isIdent(r.peekByte(false)) {
+ r.syntaxError()
+ }
+}
+
+// readIdent reads an identifier from the input.
+// If an identifier is not present, readIdent records a syntax error.
+func (r *importReader) readIdent() {
+ c := r.peekByte(true)
+ if !isIdent(c) {
+ r.syntaxError()
+ return
+ }
+ for isIdent(r.peekByte(false)) {
+ r.peek = 0
+ }
+}
+
+// readString reads a quoted string literal from the input.
+// If a quoted string is not present, readString records a syntax error.
+func (r *importReader) readString() {
+ switch r.nextByte(true) {
+ case '`':
+ for r.err == nil {
+ if r.nextByte(false) == '`' {
+ break
+ }
+ if r.eof {
+ r.syntaxError()
+ }
+ }
+ case '"':
+ for r.err == nil {
+ c := r.nextByte(false)
+ if c == '"' {
+ break
+ }
+ if r.eof || c == '\n' {
+ r.syntaxError()
+ }
+ if c == '\\' {
+ r.nextByte(false)
+ }
+ }
+ default:
+ r.syntaxError()
+ }
+}
+
+// readImport reads an import clause - optional identifier followed by quoted string -
+// from the input.
+func (r *importReader) readImport() {
+ c := r.peekByte(true)
+ if c == '.' {
+ r.peek = 0
+ } else if isIdent(c) {
+ r.readIdent()
+ }
+ r.readString()
+}
+
+// readComments is like io.ReadAll, except that it only reads the leading
+// block of comments in the file.
+func readComments(f io.Reader) ([]byte, error) {
+ r := newImportReader("", f)
+ r.peekByte(true)
+ if r.err == nil && !r.eof {
+ // Didn't reach EOF, so must have found a non-space byte. Remove it.
+ r.buf = r.buf[:len(r.buf)-1]
+ }
+ return r.buf, r.err
+}
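
Since readComments is unexported, it can only be called from within this package. A minimal sketch of its behaviour (for example, in a test in this package, assuming fmt and strings are imported) might look like:

	src := "// Copyright notice.\n\n/* build details */\n\npackage p\n"
	buf, err := readComments(strings.NewReader(src))
	if err != nil {
		panic(err)
	}
	// buf holds only the leading comments and blank lines, not "package p".
	fmt.Printf("%q\n", buf)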
+
+// readGoInfo expects a Go file as input and reads the file up to and including the import section.
+// It records what it learned in *info.
+// If info.fset is non-nil, readGoInfo parses the file and sets info.parsed, info.parseErr,
+// info.imports and info.embeds.
+//
+// It only returns an error if there are problems reading the file,
+// not for syntax errors in the file itself.
+func readGoInfo(f io.Reader, info *fileInfo) error {
+ r := newImportReader(info.name, f)
+
+ r.readKeyword("package")
+ r.readIdent()
+ for r.peekByte(true) == 'i' {
+ r.readKeyword("import")
+ if r.peekByte(true) == '(' {
+ r.nextByte(false)
+ for r.peekByte(true) != ')' && r.err == nil {
+ r.readImport()
+ }
+ r.nextByte(false)
+ } else {
+ r.readImport()
+ }
+ }
+
+ info.header = r.buf
+
+ // If we stopped successfully before EOF, we read a byte that told us we were done.
+ // Return all but that last byte, which would cause a syntax error if we let it through.
+ if r.err == nil && !r.eof {
+ info.header = r.buf[:len(r.buf)-1]
+ }
+
+ // If we stopped for a syntax error, consume the whole file so that
+ // we are sure we don't change the errors that go/parser returns.
+ if r.err == errSyntax {
+ r.err = nil
+ for r.err == nil && !r.eof {
+ r.readByte()
+ }
+ info.header = r.buf
+ }
+ if r.err != nil {
+ return r.err
+ }
+
+ if info.fset == nil {
+ return nil
+ }
+
+ // Parse file header & record imports.
+ info.parsed, info.parseErr = parser.ParseFile(info.fset, info.name, info.header, parser.ImportsOnly|parser.ParseComments)
+ if info.parseErr != nil {
+ return nil
+ }
+
+ hasEmbed := false
+ for _, decl := range info.parsed.Decls {
+ d, ok := decl.(*ast.GenDecl)
+ if !ok {
+ continue
+ }
+ for _, dspec := range d.Specs {
+ spec, ok := dspec.(*ast.ImportSpec)
+ if !ok {
+ continue
+ }
+ quoted := spec.Path.Value
+ path, err := strconv.Unquote(quoted)
+ if err != nil {
+ return fmt.Errorf("parser returned invalid quoted string: <%s>", quoted)
+ }
+ if !isValidImport(path) {
+ // The parser used to return a parse error for invalid import paths, but
+ // no longer does, so check for and create the error here instead.
+ info.parseErr = scanner.Error{Pos: info.fset.Position(spec.Pos()), Msg: "invalid import path: " + path}
+ info.imports = nil
+ return nil
+ }
+ if path == "embed" {
+ hasEmbed = true
+ }
+
+ doc := spec.Doc
+ if doc == nil && len(d.Specs) == 1 {
+ doc = d.Doc
+ }
+ info.imports = append(info.imports, fileImport{path, spec.Pos(), doc})
+ }
+ }
+
+ // If the file imports "embed",
+ // we have to look for //go:embed comments
+ // in the remainder of the file.
+ // The compiler will enforce the mapping of comments to
+ // declared variables. We just need to know the patterns.
+ // If there were //go:embed comments earlier in the file
+ // (near the package statement or imports), the compiler
+ // will reject them. They can be (and have already been) ignored.
+ if hasEmbed {
+ var line []byte
+ for first := true; r.findEmbed(first); first = false {
+ line = line[:0]
+ pos := r.pos
+ for {
+ c := r.readByteNoBuf()
+ if c == '\n' || r.err != nil || r.eof {
+ break
+ }
+ line = append(line, c)
+ }
+ // Add args if line is well-formed.
+ // Ignore badly-formed lines - the compiler will report them when it finds them,
+ // and we can pretend they are not there to help go list succeed with what it knows.
+ embs, err := parseGoEmbed(string(line), pos)
+ if err == nil {
+ info.embeds = append(info.embeds, embs...)
+ }
+ }
+ }
+
+ return nil
+}
+
+// isValidImport reports whether the import path is valid, applying the stricter
+// checks allowed by the implementation restriction in https://go.dev/ref/spec#Import_declarations.
+// It was ported from the function of the same name that was removed from the
+// parser in CL 424855, when the parser stopped doing these checks.
+func isValidImport(s string) bool {
+ const illegalChars = `!"#$%&'()*,:;<=>?[\]^{|}` + "`\uFFFD"
+ for _, r := range s {
+ if !unicode.IsGraphic(r) || unicode.IsSpace(r) || strings.ContainsRune(illegalChars, r) {
+ return false
+ }
+ }
+ return s != ""
+}
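
For instance, under these checks an ordinary path is accepted, while an empty path or one containing spaces or characters from illegalChars is rejected. A small in-package sketch:

	fmt.Println(isValidImport("fmt"))      // true
	fmt.Println(isValidImport("a b"))      // false: contains a space
	fmt.Println(isValidImport("bad;path")) // false: ';' is in illegalChars
	fmt.Println(isValidImport(""))         // false: empty path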
+
+// parseGoEmbed parses the text following "//go:embed" to extract the glob patterns.
+// It accepts unquoted space-separated patterns as well as double-quoted and back-quoted Go strings.
+// This is based on a similar function in cmd/compile/internal/gc/noder.go;
+// this version calculates position information as well.
+func parseGoEmbed(args string, pos token.Position) ([]fileEmbed, error) {
+ trimBytes := func(n int) {
+ pos.Offset += n
+ pos.Column += utf8.RuneCountInString(args[:n])
+ args = args[n:]
+ }
+ trimSpace := func() {
+ trim := strings.TrimLeftFunc(args, unicode.IsSpace)
+ trimBytes(len(args) - len(trim))
+ }
+
+ var list []fileEmbed
+ for trimSpace(); args != ""; trimSpace() {
+ var path string
+ pathPos := pos
+ Switch:
+ switch args[0] {
+ default:
+ i := len(args)
+ for j, c := range args {
+ if unicode.IsSpace(c) {
+ i = j
+ break
+ }
+ }
+ path = args[:i]
+ trimBytes(i)
+
+ case '`':
+ var ok bool
+ path, _, ok = strings.Cut(args[1:], "`")
+ if !ok {
+ return nil, fmt.Errorf("invalid quoted string in //go:embed: %s", args)
+ }
+ trimBytes(1 + len(path) + 1)
+
+ case '"':
+ i := 1
+ for ; i < len(args); i++ {
+ if args[i] == '\\' {
+ i++
+ continue
+ }
+ if args[i] == '"' {
+ q, err := strconv.Unquote(args[:i+1])
+ if err != nil {
+ return nil, fmt.Errorf("invalid quoted string in //go:embed: %s", args[:i+1])
+ }
+ path = q
+ trimBytes(i + 1)
+ break Switch
+ }
+ }
+ if i >= len(args) {
+ return nil, fmt.Errorf("invalid quoted string in //go:embed: %s", args)
+ }
+ }
+
+ if args != "" {
+ r, _ := utf8.DecodeRuneInString(args)
+ if !unicode.IsSpace(r) {
+ return nil, fmt.Errorf("invalid quoted string in //go:embed: %s", args)
+ }
+ }
+ list = append(list, fileEmbed{path, pathPos})
+ }
+ return list, nil
+}
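
As an example of the accepted forms, a single argument line mixing the three styles parses as sketched below. parseGoEmbed is unexported, so this is an in-package fragment; the position argument only seeds the line and column accounting:

	args := "images/*.png \"file with spaces.txt\" `raw/*`"
	embs, err := parseGoEmbed(args, token.Position{Filename: "x.go", Line: 4, Column: 12})
	if err != nil {
		panic(err)
	}
	for _, e := range embs {
		fmt.Println(e.pattern)
	}
	// Prints:
	// images/*.png
	// file with spaces.txt
	// raw/*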
diff --git a/src/go/build/read_test.go b/src/go/build/read_test.go
new file mode 100644
index 0000000..6851e6b
--- /dev/null
+++ b/src/go/build/read_test.go
@@ -0,0 +1,352 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package build
+
+import (
+ "fmt"
+ "go/token"
+ "io"
+ "strings"
+ "testing"
+)
+
+const quote = "`"
+
+type readTest struct {
+ // Test input contains ℙ where readGoInfo should stop.
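+	// A 𝔻 marks the end of a leading prefix (such as a BOM) that is fed to the
+	// reader but omitted from the expected output.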
+ in string
+ err string
+}
+
+var readGoInfoTests = []readTest{
+ {
+ `package p`,
+ "",
+ },
+ {
+ `package p; import "x"`,
+ "",
+ },
+ {
+ `package p; import . "x"`,
+ "",
+ },
+ {
+ `package p; import "x";ℙvar x = 1`,
+ "",
+ },
+ {
+ `package p
+
+ // comment
+
+ import "x"
+ import _ "x"
+ import a "x"
+
+ /* comment */
+
+ import (
+ "x" /* comment */
+ _ "x"
+ a "x" // comment
+ ` + quote + `x` + quote + `
+ _ /*comment*/ ` + quote + `x` + quote + `
+ a ` + quote + `x` + quote + `
+ )
+ import (
+ )
+ import ()
+ import()import()import()
+ import();import();import()
+
+ ℙvar x = 1
+ `,
+ "",
+ },
+ {
+ "\ufeff𝔻" + `package p; import "x";ℙvar x = 1`,
+ "",
+ },
+}
+
+var readCommentsTests = []readTest{
+ {
+ `ℙpackage p`,
+ "",
+ },
+ {
+ `ℙpackage p; import "x"`,
+ "",
+ },
+ {
+ `ℙpackage p; import . "x"`,
+ "",
+ },
+ {
+ "\ufeff𝔻" + `ℙpackage p; import . "x"`,
+ "",
+ },
+ {
+ `// foo
+
+ /* bar */
+
+ /* quux */ // baz
+
+ /*/ zot */
+
+ // asdf
+ ℙHello, world`,
+ "",
+ },
+ {
+ "\ufeff𝔻" + `// foo
+
+ /* bar */
+
+ /* quux */ // baz
+
+ /*/ zot */
+
+ // asdf
+ ℙHello, world`,
+ "",
+ },
+}
+
+func testRead(t *testing.T, tests []readTest, read func(io.Reader) ([]byte, error)) {
+ for i, tt := range tests {
+ beforeP, afterP, _ := strings.Cut(tt.in, "ℙ")
+ in := beforeP + afterP
+ testOut := beforeP
+
+ if beforeD, afterD, ok := strings.Cut(beforeP, "𝔻"); ok {
+ in = beforeD + afterD + afterP
+ testOut = afterD
+ }
+
+ r := strings.NewReader(in)
+ buf, err := read(r)
+ if err != nil {
+ if tt.err == "" {
+ t.Errorf("#%d: err=%q, expected success (%q)", i, err, string(buf))
+ } else if !strings.Contains(err.Error(), tt.err) {
+ t.Errorf("#%d: err=%q, expected %q", i, err, tt.err)
+ }
+ continue
+ }
+ if tt.err != "" {
+ t.Errorf("#%d: success, expected %q", i, tt.err)
+ continue
+ }
+
+ out := string(buf)
+ if out != testOut {
+ t.Errorf("#%d: wrong output:\nhave %q\nwant %q\n", i, out, testOut)
+ }
+ }
+}
+
+func TestReadGoInfo(t *testing.T) {
+ testRead(t, readGoInfoTests, func(r io.Reader) ([]byte, error) {
+ var info fileInfo
+ err := readGoInfo(r, &info)
+ return info.header, err
+ })
+}
+
+func TestReadComments(t *testing.T) {
+ testRead(t, readCommentsTests, readComments)
+}
+
+var readFailuresTests = []readTest{
+ {
+ `package`,
+ "syntax error",
+ },
+ {
+ "package p\n\x00\nimport `math`\n",
+ "unexpected NUL in input",
+ },
+ {
+ `package p; import`,
+ "syntax error",
+ },
+ {
+ `package p; import "`,
+ "syntax error",
+ },
+ {
+ "package p; import ` \n\n",
+ "syntax error",
+ },
+ {
+ `package p; import "x`,
+ "syntax error",
+ },
+ {
+ `package p; import _`,
+ "syntax error",
+ },
+ {
+ `package p; import _ "`,
+ "syntax error",
+ },
+ {
+ `package p; import _ "x`,
+ "syntax error",
+ },
+ {
+ `package p; import .`,
+ "syntax error",
+ },
+ {
+ `package p; import . "`,
+ "syntax error",
+ },
+ {
+ `package p; import . "x`,
+ "syntax error",
+ },
+ {
+ `package p; import (`,
+ "syntax error",
+ },
+ {
+ `package p; import ("`,
+ "syntax error",
+ },
+ {
+ `package p; import ("x`,
+ "syntax error",
+ },
+ {
+ `package p; import ("x"`,
+ "syntax error",
+ },
+}
+
+func TestReadFailuresIgnored(t *testing.T) {
+	// Syntax errors should not be reported by readGoInfo:
+	// instead, the entire file should be returned as output with no error.
+	// Convert the tests so they no longer expect syntax errors.
+ tests := make([]readTest, len(readFailuresTests))
+ copy(tests, readFailuresTests)
+ for i := range tests {
+ tt := &tests[i]
+ if !strings.Contains(tt.err, "NUL") {
+ tt.err = ""
+ }
+ }
+ testRead(t, tests, func(r io.Reader) ([]byte, error) {
+ var info fileInfo
+ err := readGoInfo(r, &info)
+ return info.header, err
+ })
+}
+
+var readEmbedTests = []struct {
+ in, out string
+}{
+ {
+ "package p\n",
+ "",
+ },
+ {
+ "package p\nimport \"embed\"\nvar i int\n//go:embed x y z\nvar files embed.FS",
+ `test:4:12:x
+ test:4:14:y
+ test:4:16:z`,
+ },
+ {
+ "package p\nimport \"embed\"\nvar i int\n//go:embed x \"\\x79\" `z`\nvar files embed.FS",
+ `test:4:12:x
+ test:4:14:y
+ test:4:21:z`,
+ },
+ {
+ "package p\nimport \"embed\"\nvar i int\n//go:embed x y\n//go:embed z\nvar files embed.FS",
+ `test:4:12:x
+ test:4:14:y
+ test:5:12:z`,
+ },
+ {
+ "package p\nimport \"embed\"\nvar i int\n\t //go:embed x y\n\t //go:embed z\n\t var files embed.FS",
+ `test:4:14:x
+ test:4:16:y
+ test:5:14:z`,
+ },
+ {
+ "package p\nimport \"embed\"\n//go:embed x y z\nvar files embed.FS",
+ `test:3:12:x
+ test:3:14:y
+ test:3:16:z`,
+ },
+ {
+ "\ufeffpackage p\nimport \"embed\"\n//go:embed x y z\nvar files embed.FS",
+ `test:3:12:x
+ test:3:14:y
+ test:3:16:z`,
+ },
+ {
+ "package p\nimport \"embed\"\nvar s = \"/*\"\n//go:embed x\nvar files embed.FS",
+ `test:4:12:x`,
+ },
+ {
+ `package p
+ import "embed"
+ var s = "\"\\\\"
+ //go:embed x
+ var files embed.FS`,
+ `test:4:15:x`,
+ },
+ {
+ "package p\nimport \"embed\"\nvar s = `/*`\n//go:embed x\nvar files embed.FS",
+ `test:4:12:x`,
+ },
+ {
+ "package p\nimport \"embed\"\nvar s = z/ *y\n//go:embed pointer\nvar pointer embed.FS",
+ "test:4:12:pointer",
+ },
+ {
+ "package p\n//go:embed x y z\n", // no import, no scan
+ "",
+ },
+ {
+ "package p\n//go:embed x y z\nvar files embed.FS", // no import, no scan
+ "",
+ },
+ {
+ "\ufeffpackage p\n//go:embed x y z\nvar files embed.FS", // no import, no scan
+ "",
+ },
+}
+
+func TestReadEmbed(t *testing.T) {
+ fset := token.NewFileSet()
+ for i, tt := range readEmbedTests {
+ info := fileInfo{
+ name: "test",
+ fset: fset,
+ }
+ err := readGoInfo(strings.NewReader(tt.in), &info)
+ if err != nil {
+ t.Errorf("#%d: %v", i, err)
+ continue
+ }
+ b := &strings.Builder{}
+ sep := ""
+ for _, emb := range info.embeds {
+ fmt.Fprintf(b, "%s%v:%s", sep, emb.pos, emb.pattern)
+ sep = "\n"
+ }
+ got := b.String()
+ want := strings.Join(strings.Fields(tt.out), "\n")
+ if got != want {
+ t.Errorf("#%d: embeds:\n%s\nwant:\n%s", i, got, want)
+ }
+ }
+}
diff --git a/src/go/build/syslist.go b/src/go/build/syslist.go
new file mode 100644
index 0000000..78ca565
--- /dev/null
+++ b/src/go/build/syslist.go
@@ -0,0 +1,80 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package build
+
+// Note that this file is read by internal/goarch/gengoarch.go and by
+// internal/goos/gengoos.go. If you change this file, look at those
+// files as well.
+
+// knownOS is the list of past, present, and future known GOOS values.
+// Do not remove from this list, as it is used for filename matching.
+// If you add an entry to this list, look at unixOS, below.
+var knownOS = map[string]bool{
+ "aix": true,
+ "android": true,
+ "darwin": true,
+ "dragonfly": true,
+ "freebsd": true,
+ "hurd": true,
+ "illumos": true,
+ "ios": true,
+ "js": true,
+ "linux": true,
+ "nacl": true,
+ "netbsd": true,
+ "openbsd": true,
+ "plan9": true,
+ "solaris": true,
+ "windows": true,
+ "zos": true,
+}
+
+// unixOS is the set of GOOS values matched by the "unix" build tag.
+// This is not used for filename matching.
+// This list also appears in cmd/dist/build.go and
+// cmd/go/internal/imports/build.go.
+var unixOS = map[string]bool{
+ "aix": true,
+ "android": true,
+ "darwin": true,
+ "dragonfly": true,
+ "freebsd": true,
+ "hurd": true,
+ "illumos": true,
+ "ios": true,
+ "linux": true,
+ "netbsd": true,
+ "openbsd": true,
+ "solaris": true,
+}
+
+// knownArch is the list of past, present, and future known GOARCH values.
+// Do not remove from this list, as it is used for filename matching.
+var knownArch = map[string]bool{
+ "386": true,
+ "amd64": true,
+ "amd64p32": true,
+ "arm": true,
+ "armbe": true,
+ "arm64": true,
+ "arm64be": true,
+ "loong64": true,
+ "mips": true,
+ "mipsle": true,
+ "mips64": true,
+ "mips64le": true,
+ "mips64p32": true,
+ "mips64p32le": true,
+ "ppc": true,
+ "ppc64": true,
+ "ppc64le": true,
+ "riscv": true,
+ "riscv64": true,
+ "s390": true,
+ "s390x": true,
+ "sparc": true,
+ "sparc64": true,
+ "wasm": true,
+}
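
Together these tables drive the implicit filename constraints: a file named *_GOOS.go, *_GOARCH.go, or *_GOOS_GOARCH.go is only considered for that target. A rough in-package sketch using the unexported goodOSArchFile helper (also exercised by the test file below):

	allTags := make(map[string]bool)
	// Matches only when building for netbsd/arm:
	fmt.Println(Default.goodOSArchFile("x_netbsd_arm.go", allTags))
	// No recognized OS or arch suffix, so it is considered for every target:
	fmt.Println(Default.goodOSArchFile("file.go", allTags))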
diff --git a/src/go/build/syslist_test.go b/src/go/build/syslist_test.go
new file mode 100644
index 0000000..2b7b4c7
--- /dev/null
+++ b/src/go/build/syslist_test.go
@@ -0,0 +1,62 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package build
+
+import (
+ "runtime"
+ "testing"
+)
+
+var (
+ thisOS = runtime.GOOS
+ thisArch = runtime.GOARCH
+ otherOS = anotherOS()
+ otherArch = anotherArch()
+)
+
+func anotherOS() string {
+ if thisOS != "darwin" && thisOS != "ios" {
+ return "darwin"
+ }
+ return "linux"
+}
+
+func anotherArch() string {
+ if thisArch != "amd64" {
+ return "amd64"
+ }
+ return "386"
+}
+
+type GoodFileTest struct {
+ name string
+ result bool
+}
+
+var tests = []GoodFileTest{
+ {"file.go", true},
+ {"file.c", true},
+ {"file_foo.go", true},
+ {"file_" + thisArch + ".go", true},
+ {"file_" + otherArch + ".go", false},
+ {"file_" + thisOS + ".go", true},
+ {"file_" + otherOS + ".go", false},
+ {"file_" + thisOS + "_" + thisArch + ".go", true},
+ {"file_" + otherOS + "_" + thisArch + ".go", false},
+ {"file_" + thisOS + "_" + otherArch + ".go", false},
+ {"file_" + otherOS + "_" + otherArch + ".go", false},
+ {"file_foo_" + thisArch + ".go", true},
+ {"file_foo_" + otherArch + ".go", false},
+ {"file_" + thisOS + ".c", true},
+ {"file_" + otherOS + ".c", false},
+}
+
+func TestGoodOSArch(t *testing.T) {
+ for _, test := range tests {
+ if Default.goodOSArchFile(test.name, make(map[string]bool)) != test.result {
+ t.Fatalf("goodOSArchFile(%q) != %v", test.name, test.result)
+ }
+ }
+}
diff --git a/src/go/build/testdata/alltags/alltags.go b/src/go/build/testdata/alltags/alltags.go
new file mode 100644
index 0000000..5d30855
--- /dev/null
+++ b/src/go/build/testdata/alltags/alltags.go
@@ -0,0 +1,5 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package alltags
diff --git a/src/go/build/testdata/alltags/x_netbsd_arm.go b/src/go/build/testdata/alltags/x_netbsd_arm.go
new file mode 100644
index 0000000..5d30855
--- /dev/null
+++ b/src/go/build/testdata/alltags/x_netbsd_arm.go
@@ -0,0 +1,5 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package alltags
diff --git a/src/go/build/testdata/bads/bad.s b/src/go/build/testdata/bads/bad.s
new file mode 100644
index 0000000..b670f82
--- /dev/null
+++ b/src/go/build/testdata/bads/bad.s
@@ -0,0 +1 @@
+;/
diff --git a/src/go/build/testdata/cgo_disabled/cgo_disabled.go b/src/go/build/testdata/cgo_disabled/cgo_disabled.go
new file mode 100644
index 0000000..d1edb99
--- /dev/null
+++ b/src/go/build/testdata/cgo_disabled/cgo_disabled.go
@@ -0,0 +1,5 @@
+package cgo_disabled
+
+import "C"
+
+import _ "should/be/ignored"
diff --git a/src/go/build/testdata/cgo_disabled/empty.go b/src/go/build/testdata/cgo_disabled/empty.go
new file mode 100644
index 0000000..63afe42
--- /dev/null
+++ b/src/go/build/testdata/cgo_disabled/empty.go
@@ -0,0 +1 @@
+package cgo_disabled
diff --git a/src/go/build/testdata/doc/a_test.go b/src/go/build/testdata/doc/a_test.go
new file mode 100644
index 0000000..1c07b56
--- /dev/null
+++ b/src/go/build/testdata/doc/a_test.go
@@ -0,0 +1,2 @@
+// Doc from xtests
+package doc_test
diff --git a/src/go/build/testdata/doc/b_test.go b/src/go/build/testdata/doc/b_test.go
new file mode 100644
index 0000000..0cf1605
--- /dev/null
+++ b/src/go/build/testdata/doc/b_test.go
@@ -0,0 +1 @@
+package doc_test
diff --git a/src/go/build/testdata/doc/c_test.go b/src/go/build/testdata/doc/c_test.go
new file mode 100644
index 0000000..1025707
--- /dev/null
+++ b/src/go/build/testdata/doc/c_test.go
@@ -0,0 +1 @@
+package doc
diff --git a/src/go/build/testdata/doc/d_test.go b/src/go/build/testdata/doc/d_test.go
new file mode 100644
index 0000000..ec19564
--- /dev/null
+++ b/src/go/build/testdata/doc/d_test.go
@@ -0,0 +1,2 @@
+// Doc from regular tests.
+package doc
diff --git a/src/go/build/testdata/doc/e.go b/src/go/build/testdata/doc/e.go
new file mode 100644
index 0000000..1025707
--- /dev/null
+++ b/src/go/build/testdata/doc/e.go
@@ -0,0 +1 @@
+package doc
diff --git a/src/go/build/testdata/doc/f.go b/src/go/build/testdata/doc/f.go
new file mode 100644
index 0000000..ab1d0bc
--- /dev/null
+++ b/src/go/build/testdata/doc/f.go
@@ -0,0 +1,2 @@
+// Correct
+package doc
diff --git a/src/go/build/testdata/empty/dummy b/src/go/build/testdata/empty/dummy
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/go/build/testdata/empty/dummy
diff --git a/src/go/build/testdata/multi/file.go b/src/go/build/testdata/multi/file.go
new file mode 100644
index 0000000..ee946eb
--- /dev/null
+++ b/src/go/build/testdata/multi/file.go
@@ -0,0 +1,5 @@
+// Test data - not compiled.
+
+package main
+
+func main() {}
diff --git a/src/go/build/testdata/multi/file_appengine.go b/src/go/build/testdata/multi/file_appengine.go
new file mode 100644
index 0000000..4ea31e7
--- /dev/null
+++ b/src/go/build/testdata/multi/file_appengine.go
@@ -0,0 +1,5 @@
+// Test data - not compiled.
+
+package test_package
+
+func init() {}
diff --git a/src/go/build/testdata/non_source_tags/non_source_tags.go b/src/go/build/testdata/non_source_tags/non_source_tags.go
new file mode 100644
index 0000000..068acc4
--- /dev/null
+++ b/src/go/build/testdata/non_source_tags/non_source_tags.go
@@ -0,0 +1,5 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package non_source_tags
diff --git a/src/go/build/testdata/non_source_tags/x_arm.go.ignore b/src/go/build/testdata/non_source_tags/x_arm.go.ignore
new file mode 100644
index 0000000..068acc4
--- /dev/null
+++ b/src/go/build/testdata/non_source_tags/x_arm.go.ignore
@@ -0,0 +1,5 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package non_source_tags
diff --git a/src/go/build/testdata/other/file/file.go b/src/go/build/testdata/other/file/file.go
new file mode 100644
index 0000000..bbfd3e9
--- /dev/null
+++ b/src/go/build/testdata/other/file/file.go
@@ -0,0 +1,5 @@
+// Test data - not compiled.
+
+package file
+
+func F() {}
diff --git a/src/go/build/testdata/other/main.go b/src/go/build/testdata/other/main.go
new file mode 100644
index 0000000..e090435
--- /dev/null
+++ b/src/go/build/testdata/other/main.go
@@ -0,0 +1,11 @@
+// Test data - not compiled.
+
+package main
+
+import (
+ "./file"
+)
+
+func main() {
+ file.F()
+}
diff --git a/src/go/build/testdata/withvendor/src/a/b/b.go b/src/go/build/testdata/withvendor/src/a/b/b.go
new file mode 100644
index 0000000..4405d54
--- /dev/null
+++ b/src/go/build/testdata/withvendor/src/a/b/b.go
@@ -0,0 +1,3 @@
+package b
+
+import _ "c/d"
diff --git a/src/go/build/testdata/withvendor/src/a/vendor/c/d/d.go b/src/go/build/testdata/withvendor/src/a/vendor/c/d/d.go
new file mode 100644
index 0000000..142fb42
--- /dev/null
+++ b/src/go/build/testdata/withvendor/src/a/vendor/c/d/d.go
@@ -0,0 +1 @@
+package d