author:    Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-16 19:25:22 +0000
committer: Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-16 19:25:22 +0000
commit:    f6ad4dcef54c5ce997a4bad5a6d86de229015700 (patch)
tree:      7cfa4e31ace5c2bd95c72b154d15af494b2bcbef /src/cmd/compile/internal/importer
parent:    Initial commit. (diff)
Adding upstream version 1.22.1. (tag: upstream/1.22.1)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'src/cmd/compile/internal/importer')
16 files changed, 2672 insertions, 0 deletions
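
For orientation before the diff itself: the new importer package is driven through its exported Import and FindPkg functions, exactly as its own tests (gcimporter_test.go in this diff) do. Below is a minimal, hypothetical sketch of that entry point; it assumes the file is built inside the Go source tree, since cmd/compile/internal/... packages cannot be imported from outside GOROOT.

package main

import (
	"fmt"
	"log"

	"cmd/compile/internal/importer"
	"cmd/compile/internal/types2"
)

func main() {
	// The packages map caches previously imported packages; Import expects
	// it to contain every package already imported.
	packages := make(map[string]*types2.Package)

	// Import "strings" relative to the current directory, mirroring the
	// package's own tests. Passing a nil lookup function makes Import
	// locate the export data itself via FindPkg.
	pkg, err := importer.Import(packages, "strings", ".", nil)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("imported %s (complete: %v)\n", pkg.Path(), pkg.Complete())
}
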
diff --git a/src/cmd/compile/internal/importer/exportdata.go b/src/cmd/compile/internal/importer/exportdata.go new file mode 100644 index 0000000..42fc5c9 --- /dev/null +++ b/src/cmd/compile/internal/importer/exportdata.go @@ -0,0 +1,95 @@ +// Copyright 2011 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This file implements FindExportData. + +package importer + +import ( + "bufio" + "fmt" + "io" + "strconv" + "strings" +) + +func readGopackHeader(r *bufio.Reader) (name string, size int, err error) { + // See $GOROOT/include/ar.h. + hdr := make([]byte, 16+12+6+6+8+10+2) + _, err = io.ReadFull(r, hdr) + if err != nil { + return + } + // leave for debugging + if false { + fmt.Printf("header: %s", hdr) + } + s := strings.TrimSpace(string(hdr[16+12+6+6+8:][:10])) + size, err = strconv.Atoi(s) + if err != nil || hdr[len(hdr)-2] != '`' || hdr[len(hdr)-1] != '\n' { + err = fmt.Errorf("invalid archive header") + return + } + name = strings.TrimSpace(string(hdr[:16])) + return +} + +// FindExportData positions the reader r at the beginning of the +// export data section of an underlying GC-created object/archive +// file by reading from it. The reader must be positioned at the +// start of the file before calling this function. The hdr result +// is the string before the export data, either "$$" or "$$B". +// +// If size is non-negative, it's the number of bytes of export data +// still available to read from r. +func FindExportData(r *bufio.Reader) (hdr string, size int, err error) { + // Read first line to make sure this is an object file. + line, err := r.ReadSlice('\n') + if err != nil { + err = fmt.Errorf("can't find export data (%v)", err) + return + } + + if string(line) == "!<arch>\n" { + // Archive file. Scan to __.PKGDEF. + var name string + if name, size, err = readGopackHeader(r); err != nil { + return + } + + // First entry should be __.PKGDEF. + if name != "__.PKGDEF" { + err = fmt.Errorf("go archive is missing __.PKGDEF") + return + } + + // Read first line of __.PKGDEF data, so that line + // is once again the first line of the input. + if line, err = r.ReadSlice('\n'); err != nil { + err = fmt.Errorf("can't find export data (%v)", err) + return + } + } + + // Now at __.PKGDEF in archive or still at beginning of file. + // Either way, line should begin with "go object ". + if !strings.HasPrefix(string(line), "go object ") { + err = fmt.Errorf("not a Go object file") + return + } + size -= len(line) + + // Skip over object header to export data. + // Begins after first line starting with $$. + for line[0] != '$' { + if line, err = r.ReadSlice('\n'); err != nil { + err = fmt.Errorf("can't find export data (%v)", err) + return + } + size -= len(line) + } + hdr = string(line) + + return +} diff --git a/src/cmd/compile/internal/importer/gcimporter.go b/src/cmd/compile/internal/importer/gcimporter.go new file mode 100644 index 0000000..1f7b49c --- /dev/null +++ b/src/cmd/compile/internal/importer/gcimporter.go @@ -0,0 +1,253 @@ +// Copyright 2011 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// package importer implements Import for gc-generated object files. 
+package importer + +import ( + "bufio" + "bytes" + "errors" + "fmt" + "go/build" + "internal/pkgbits" + "io" + "os" + "os/exec" + "path/filepath" + "strings" + "sync" + + "cmd/compile/internal/types2" +) + +var exportMap sync.Map // package dir → func() (string, error) + +// lookupGorootExport returns the location of the export data +// (normally found in the build cache, but located in GOROOT/pkg +// in prior Go releases) for the package located in pkgDir. +// +// (We use the package's directory instead of its import path +// mainly to simplify handling of the packages in src/vendor +// and cmd/vendor.) +func lookupGorootExport(pkgDir string) (string, error) { + f, ok := exportMap.Load(pkgDir) + if !ok { + var ( + listOnce sync.Once + exportPath string + err error + ) + f, _ = exportMap.LoadOrStore(pkgDir, func() (string, error) { + listOnce.Do(func() { + cmd := exec.Command(filepath.Join(build.Default.GOROOT, "bin", "go"), "list", "-export", "-f", "{{.Export}}", pkgDir) + cmd.Dir = build.Default.GOROOT + cmd.Env = append(os.Environ(), "PWD="+cmd.Dir, "GOROOT="+build.Default.GOROOT) + var output []byte + output, err = cmd.Output() + if err != nil { + if ee, ok := err.(*exec.ExitError); ok && len(ee.Stderr) > 0 { + err = errors.New(string(ee.Stderr)) + } + return + } + + exports := strings.Split(string(bytes.TrimSpace(output)), "\n") + if len(exports) != 1 { + err = fmt.Errorf("go list reported %d exports; expected 1", len(exports)) + return + } + + exportPath = exports[0] + }) + + return exportPath, err + }) + } + + return f.(func() (string, error))() +} + +var pkgExts = [...]string{".a", ".o"} // a file from the build cache will have no extension + +// FindPkg returns the filename and unique package id for an import +// path based on package information provided by build.Import (using +// the build.Default build.Context). A relative srcDir is interpreted +// relative to the current working directory. +func FindPkg(path, srcDir string) (filename, id string, err error) { + if path == "" { + return "", "", errors.New("path is empty") + } + + var noext string + switch { + default: + // "x" -> "$GOPATH/pkg/$GOOS_$GOARCH/x.ext", "x" + // Don't require the source files to be present. 
+ if abs, err := filepath.Abs(srcDir); err == nil { // see issue 14282 + srcDir = abs + } + var bp *build.Package + bp, err = build.Import(path, srcDir, build.FindOnly|build.AllowBinary) + if bp.PkgObj == "" { + if bp.Goroot && bp.Dir != "" { + filename, err = lookupGorootExport(bp.Dir) + if err == nil { + _, err = os.Stat(filename) + } + if err == nil { + return filename, bp.ImportPath, nil + } + } + goto notfound + } else { + noext = strings.TrimSuffix(bp.PkgObj, ".a") + } + id = bp.ImportPath + + case build.IsLocalImport(path): + // "./x" -> "/this/directory/x.ext", "/this/directory/x" + noext = filepath.Join(srcDir, path) + id = noext + + case filepath.IsAbs(path): + // for completeness only - go/build.Import + // does not support absolute imports + // "/x" -> "/x.ext", "/x" + noext = path + id = path + } + + if false { // for debugging + if path != id { + fmt.Printf("%s -> %s\n", path, id) + } + } + + // try extensions + for _, ext := range pkgExts { + filename = noext + ext + f, statErr := os.Stat(filename) + if statErr == nil && !f.IsDir() { + return filename, id, nil + } + if err == nil { + err = statErr + } + } + +notfound: + if err == nil { + return "", path, fmt.Errorf("can't find import: %q", path) + } + return "", path, fmt.Errorf("can't find import: %q: %w", path, err) +} + +// Import imports a gc-generated package given its import path and srcDir, adds +// the corresponding package object to the packages map, and returns the object. +// The packages map must contain all packages already imported. +func Import(packages map[string]*types2.Package, path, srcDir string, lookup func(path string) (io.ReadCloser, error)) (pkg *types2.Package, err error) { + var rc io.ReadCloser + var id string + if lookup != nil { + // With custom lookup specified, assume that caller has + // converted path to a canonical import path for use in the map. + if path == "unsafe" { + return types2.Unsafe, nil + } + id = path + + // No need to re-import if the package was imported completely before. + if pkg = packages[id]; pkg != nil && pkg.Complete() { + return + } + f, err := lookup(path) + if err != nil { + return nil, err + } + rc = f + } else { + var filename string + filename, id, err = FindPkg(path, srcDir) + if filename == "" { + if path == "unsafe" { + return types2.Unsafe, nil + } + return nil, err + } + + // no need to re-import if the package was imported completely before + if pkg = packages[id]; pkg != nil && pkg.Complete() { + return + } + + // open file + f, err := os.Open(filename) + if err != nil { + return nil, err + } + defer func() { + if err != nil { + // add file name to error + err = fmt.Errorf("%s: %v", filename, err) + } + }() + rc = f + } + defer rc.Close() + + buf := bufio.NewReader(rc) + hdr, size, err := FindExportData(buf) + if err != nil { + return + } + + switch hdr { + case "$$\n": + err = fmt.Errorf("import %q: old textual export format no longer supported (recompile library)", path) + + case "$$B\n": + var data []byte + var r io.Reader = buf + if size >= 0 { + r = io.LimitReader(r, int64(size)) + } + data, err = io.ReadAll(r) + if err != nil { + break + } + + if len(data) == 0 { + err = fmt.Errorf("import %q: missing export data", path) + break + } + exportFormat := data[0] + s := string(data[1:]) + + // The indexed export format starts with an 'i'; the older + // binary export format starts with a 'c', 'd', or 'v' + // (from "version"). Select appropriate importer. 
+ switch exportFormat { + case 'u': + s = s[:strings.Index(s, "\n$$\n")] + input := pkgbits.NewPkgDecoder(id, s) + pkg = ReadPackage(nil, packages, input) + case 'i': + pkg, err = ImportData(packages, s, id) + default: + err = fmt.Errorf("import %q: old binary export format no longer supported (recompile library)", path) + } + + default: + err = fmt.Errorf("import %q: unknown export data header: %q", path, hdr) + } + + return +} + +type byPath []*types2.Package + +func (a byPath) Len() int { return len(a) } +func (a byPath) Swap(i, j int) { a[i], a[j] = a[j], a[i] } +func (a byPath) Less(i, j int) bool { return a[i].Path() < a[j].Path() } diff --git a/src/cmd/compile/internal/importer/gcimporter_test.go b/src/cmd/compile/internal/importer/gcimporter_test.go new file mode 100644 index 0000000..7fe4445 --- /dev/null +++ b/src/cmd/compile/internal/importer/gcimporter_test.go @@ -0,0 +1,608 @@ +// Copyright 2011 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package importer + +import ( + "bytes" + "cmd/compile/internal/types2" + "fmt" + "go/build" + "internal/testenv" + "os" + "os/exec" + "path" + "path/filepath" + "runtime" + "strings" + "testing" + "time" +) + +func TestMain(m *testing.M) { + build.Default.GOROOT = testenv.GOROOT(nil) + os.Exit(m.Run()) +} + +// compile runs the compiler on filename, with dirname as the working directory, +// and writes the output file to outdirname. +// compile gives the resulting package a packagepath of testdata/<filebasename>. +func compile(t *testing.T, dirname, filename, outdirname string, packagefiles map[string]string) string { + // filename must end with ".go" + basename, ok := strings.CutSuffix(filepath.Base(filename), ".go") + if !ok { + t.Helper() + t.Fatalf("filename doesn't end in .go: %s", filename) + } + objname := basename + ".o" + outname := filepath.Join(outdirname, objname) + pkgpath := path.Join("testdata", basename) + + importcfgfile := os.DevNull + if len(packagefiles) > 0 { + importcfgfile = filepath.Join(outdirname, basename) + ".importcfg" + importcfg := new(bytes.Buffer) + for k, v := range packagefiles { + fmt.Fprintf(importcfg, "packagefile %s=%s\n", k, v) + } + if err := os.WriteFile(importcfgfile, importcfg.Bytes(), 0655); err != nil { + t.Fatal(err) + } + } + + cmd := testenv.Command(t, testenv.GoToolPath(t), "tool", "compile", "-p", pkgpath, "-D", "testdata", "-importcfg", importcfgfile, "-o", outname, filename) + cmd.Dir = dirname + out, err := cmd.CombinedOutput() + if err != nil { + t.Helper() + t.Logf("%s", out) + t.Fatalf("go tool compile %s failed: %s", filename, err) + } + return outname +} + +func testPath(t *testing.T, path, srcDir string) *types2.Package { + t0 := time.Now() + pkg, err := Import(make(map[string]*types2.Package), path, srcDir, nil) + if err != nil { + t.Errorf("testPath(%s): %s", path, err) + return nil + } + t.Logf("testPath(%s): %v", path, time.Since(t0)) + return pkg +} + +func mktmpdir(t *testing.T) string { + tmpdir := t.TempDir() + if err := os.Mkdir(filepath.Join(tmpdir, "testdata"), 0700); err != nil { + t.Fatal("mktmpdir:", err) + } + return tmpdir +} + +func TestImportTestdata(t *testing.T) { + // This package only handles gc export data. 
+ if runtime.Compiler != "gc" { + t.Skipf("gc-built packages not available (compiler = %s)", runtime.Compiler) + } + + testenv.MustHaveGoBuild(t) + + testfiles := map[string][]string{ + "exports.go": {"go/ast", "go/token"}, + "generics.go": nil, + } + if true /* was goexperiment.Unified */ { + // TODO(mdempsky): Fix test below to flatten the transitive + // Package.Imports graph. Unified IR is more precise about + // recreating the package import graph. + testfiles["exports.go"] = []string{"go/ast"} + } + + for testfile, wantImports := range testfiles { + tmpdir := mktmpdir(t) + + importMap := map[string]string{} + for _, pkg := range wantImports { + export, _, err := FindPkg(pkg, "testdata") + if export == "" { + t.Fatalf("no export data found for %s: %v", pkg, err) + } + importMap[pkg] = export + } + + compile(t, "testdata", testfile, filepath.Join(tmpdir, "testdata"), importMap) + path := "./testdata/" + strings.TrimSuffix(testfile, ".go") + + if pkg := testPath(t, path, tmpdir); pkg != nil { + // The package's Imports list must include all packages + // explicitly imported by testfile, plus all packages + // referenced indirectly via exported objects in testfile. + got := fmt.Sprint(pkg.Imports()) + for _, want := range wantImports { + if !strings.Contains(got, want) { + t.Errorf(`Package("exports").Imports() = %s, does not contain %s`, got, want) + } + } + } + } +} + +func TestVersionHandling(t *testing.T) { + testenv.MustHaveGoBuild(t) + + // This package only handles gc export data. + if runtime.Compiler != "gc" { + t.Skipf("gc-built packages not available (compiler = %s)", runtime.Compiler) + } + + const dir = "./testdata/versions" + list, err := os.ReadDir(dir) + if err != nil { + t.Fatal(err) + } + + tmpdir := mktmpdir(t) + corruptdir := filepath.Join(tmpdir, "testdata", "versions") + if err := os.Mkdir(corruptdir, 0700); err != nil { + t.Fatal(err) + } + + for _, f := range list { + name := f.Name() + if !strings.HasSuffix(name, ".a") { + continue // not a package file + } + if strings.Contains(name, "corrupted") { + continue // don't process a leftover corrupted file + } + pkgpath := "./" + name[:len(name)-2] + + if testing.Verbose() { + t.Logf("importing %s", name) + } + + // test that export data can be imported + _, err := Import(make(map[string]*types2.Package), pkgpath, dir, nil) + if err != nil { + // ok to fail if it fails with a no longer supported error for select files + if strings.Contains(err.Error(), "no longer supported") { + switch name { + case "test_go1.7_0.a", "test_go1.7_1.a", + "test_go1.8_4.a", "test_go1.8_5.a", + "test_go1.11_6b.a", "test_go1.11_999b.a": + continue + } + // fall through + } + // ok to fail if it fails with a newer version error for select files + if strings.Contains(err.Error(), "newer version") { + switch name { + case "test_go1.11_999i.a": + continue + } + // fall through + } + t.Errorf("import %q failed: %v", pkgpath, err) + continue + } + + // create file with corrupted export data + // 1) read file + data, err := os.ReadFile(filepath.Join(dir, name)) + if err != nil { + t.Fatal(err) + } + // 2) find export data + i := bytes.Index(data, []byte("\n$$B\n")) + 5 + j := bytes.Index(data[i:], []byte("\n$$\n")) + i + if i < 0 || j < 0 || i > j { + t.Fatalf("export data section not found (i = %d, j = %d)", i, j) + } + // 3) corrupt the data (increment every 7th byte) + for k := j - 13; k >= i; k -= 7 { + data[k]++ + } + // 4) write the file + pkgpath += "_corrupted" + filename := filepath.Join(corruptdir, pkgpath) + ".a" + 
os.WriteFile(filename, data, 0666) + + // test that importing the corrupted file results in an error + _, err = Import(make(map[string]*types2.Package), pkgpath, corruptdir, nil) + if err == nil { + t.Errorf("import corrupted %q succeeded", pkgpath) + } else if msg := err.Error(); !strings.Contains(msg, "version skew") { + t.Errorf("import %q error incorrect (%s)", pkgpath, msg) + } + } +} + +func TestImportStdLib(t *testing.T) { + if testing.Short() { + t.Skip("the imports can be expensive, and this test is especially slow when the build cache is empty") + } + testenv.MustHaveGoBuild(t) + + // This package only handles gc export data. + if runtime.Compiler != "gc" { + t.Skipf("gc-built packages not available (compiler = %s)", runtime.Compiler) + } + + // Get list of packages in stdlib. Filter out test-only packages with {{if .GoFiles}} check. + var stderr bytes.Buffer + cmd := exec.Command("go", "list", "-f", "{{if .GoFiles}}{{.ImportPath}}{{end}}", "std") + cmd.Stderr = &stderr + out, err := cmd.Output() + if err != nil { + t.Fatalf("failed to run go list to determine stdlib packages: %v\nstderr:\n%v", err, stderr.String()) + } + pkgs := strings.Fields(string(out)) + + var nimports int + for _, pkg := range pkgs { + t.Run(pkg, func(t *testing.T) { + if testPath(t, pkg, filepath.Join(testenv.GOROOT(t), "src", path.Dir(pkg))) != nil { + nimports++ + } + }) + } + const minPkgs = 225 // 'GOOS=plan9 go1.18 list std | wc -l' reports 228; most other platforms have more. + if len(pkgs) < minPkgs { + t.Fatalf("too few packages (%d) were imported", nimports) + } + + t.Logf("tested %d imports", nimports) +} + +var importedObjectTests = []struct { + name string + want string +}{ + // non-interfaces + {"crypto.Hash", "type Hash uint"}, + {"go/ast.ObjKind", "type ObjKind int"}, + {"go/types.Qualifier", "type Qualifier func(*Package) string"}, + {"go/types.Comparable", "func Comparable(T Type) bool"}, + {"math.Pi", "const Pi untyped float"}, + {"math.Sin", "func Sin(x float64) float64"}, + {"go/ast.NotNilFilter", "func NotNilFilter(_ string, v reflect.Value) bool"}, + {"go/internal/gcimporter.FindPkg", "func FindPkg(path string, srcDir string) (filename string, id string, err error)"}, + + // interfaces + {"context.Context", "type Context interface{Deadline() (deadline time.Time, ok bool); Done() <-chan struct{}; Err() error; Value(key any) any}"}, + {"crypto.Decrypter", "type Decrypter interface{Decrypt(rand io.Reader, msg []byte, opts DecrypterOpts) (plaintext []byte, err error); Public() PublicKey}"}, + {"encoding.BinaryMarshaler", "type BinaryMarshaler interface{MarshalBinary() (data []byte, err error)}"}, + {"io.Reader", "type Reader interface{Read(p []byte) (n int, err error)}"}, + {"io.ReadWriter", "type ReadWriter interface{Reader; Writer}"}, + {"go/ast.Node", "type Node interface{End() go/token.Pos; Pos() go/token.Pos}"}, + {"go/types.Type", "type Type interface{String() string; Underlying() Type}"}, +} + +func TestImportedTypes(t *testing.T) { + testenv.MustHaveGoBuild(t) + + // This package only handles gc export data. 
+ if runtime.Compiler != "gc" { + t.Skipf("gc-built packages not available (compiler = %s)", runtime.Compiler) + } + + for _, test := range importedObjectTests { + s := strings.Split(test.name, ".") + if len(s) != 2 { + t.Fatal("inconsistent test data") + } + importPath := s[0] + objName := s[1] + + pkg, err := Import(make(map[string]*types2.Package), importPath, ".", nil) + if err != nil { + t.Error(err) + continue + } + + obj := pkg.Scope().Lookup(objName) + if obj == nil { + t.Errorf("%s: object not found", test.name) + continue + } + + got := types2.ObjectString(obj, types2.RelativeTo(pkg)) + if got != test.want { + t.Errorf("%s: got %q; want %q", test.name, got, test.want) + } + + if named, _ := obj.Type().(*types2.Named); named != nil { + verifyInterfaceMethodRecvs(t, named, 0) + } + } +} + +// verifyInterfaceMethodRecvs verifies that method receiver types +// are named if the methods belong to a named interface type. +func verifyInterfaceMethodRecvs(t *testing.T, named *types2.Named, level int) { + // avoid endless recursion in case of an embedding bug that lead to a cycle + if level > 10 { + t.Errorf("%s: embeds itself", named) + return + } + + iface, _ := named.Underlying().(*types2.Interface) + if iface == nil { + return // not an interface + } + + // The unified IR importer always sets interface method receiver + // parameters to point to the Interface type, rather than the Named. + // See #49906. + // + // TODO(mdempsky): This is only true for the types2 importer. For + // the go/types importer, we duplicate the Interface and rewrite its + // receiver methods to match historical behavior. + var want types2.Type = named + if true /* was goexperiment.Unified */ { + want = iface + } + + // check explicitly declared methods + for i := 0; i < iface.NumExplicitMethods(); i++ { + m := iface.ExplicitMethod(i) + recv := m.Type().(*types2.Signature).Recv() + if recv == nil { + t.Errorf("%s: missing receiver type", m) + continue + } + if recv.Type() != want { + t.Errorf("%s: got recv type %s; want %s", m, recv.Type(), named) + } + } + + // check embedded interfaces (if they are named, too) + for i := 0; i < iface.NumEmbeddeds(); i++ { + // embedding of interfaces cannot have cycles; recursion will terminate + if etype, _ := iface.EmbeddedType(i).(*types2.Named); etype != nil { + verifyInterfaceMethodRecvs(t, etype, level+1) + } + } +} + +func TestIssue5815(t *testing.T) { + testenv.MustHaveGoBuild(t) + + // This package only handles gc export data. + if runtime.Compiler != "gc" { + t.Skipf("gc-built packages not available (compiler = %s)", runtime.Compiler) + } + + pkg := importPkg(t, "strings", ".") + + scope := pkg.Scope() + for _, name := range scope.Names() { + obj := scope.Lookup(name) + if obj.Pkg() == nil { + t.Errorf("no pkg for %s", obj) + } + if tname, _ := obj.(*types2.TypeName); tname != nil { + named := tname.Type().(*types2.Named) + for i := 0; i < named.NumMethods(); i++ { + m := named.Method(i) + if m.Pkg() == nil { + t.Errorf("no pkg for %s", m) + } + } + } + } +} + +// Smoke test to ensure that imported methods get the correct package. +func TestCorrectMethodPackage(t *testing.T) { + testenv.MustHaveGoBuild(t) + + // This package only handles gc export data. 
+ if runtime.Compiler != "gc" { + t.Skipf("gc-built packages not available (compiler = %s)", runtime.Compiler) + } + + imports := make(map[string]*types2.Package) + _, err := Import(imports, "net/http", ".", nil) + if err != nil { + t.Fatal(err) + } + + mutex := imports["sync"].Scope().Lookup("Mutex").(*types2.TypeName).Type() + obj, _, _ := types2.LookupFieldOrMethod(types2.NewPointer(mutex), false, nil, "Lock") + lock := obj.(*types2.Func) + if got, want := lock.Pkg().Path(), "sync"; got != want { + t.Errorf("got package path %q; want %q", got, want) + } +} + +func TestIssue13566(t *testing.T) { + testenv.MustHaveGoBuild(t) + + // This package only handles gc export data. + if runtime.Compiler != "gc" { + t.Skipf("gc-built packages not available (compiler = %s)", runtime.Compiler) + } + + tmpdir := mktmpdir(t) + testoutdir := filepath.Join(tmpdir, "testdata") + + // b.go needs to be compiled from the output directory so that the compiler can + // find the compiled package a. We pass the full path to compile() so that we + // don't have to copy the file to that directory. + bpath, err := filepath.Abs(filepath.Join("testdata", "b.go")) + if err != nil { + t.Fatal(err) + } + + jsonExport, _, err := FindPkg("encoding/json", "testdata") + if jsonExport == "" { + t.Fatalf("no export data found for encoding/json: %v", err) + } + + compile(t, "testdata", "a.go", testoutdir, map[string]string{"encoding/json": jsonExport}) + compile(t, testoutdir, bpath, testoutdir, map[string]string{"testdata/a": filepath.Join(testoutdir, "a.o")}) + + // import must succeed (test for issue at hand) + pkg := importPkg(t, "./testdata/b", tmpdir) + + // make sure all indirectly imported packages have names + for _, imp := range pkg.Imports() { + if imp.Name() == "" { + t.Errorf("no name for %s package", imp.Path()) + } + } +} + +func TestIssue13898(t *testing.T) { + testenv.MustHaveGoBuild(t) + + // This package only handles gc export data. + if runtime.Compiler != "gc" { + t.Skipf("gc-built packages not available (compiler = %s)", runtime.Compiler) + } + + // import go/internal/gcimporter which imports go/types partially + imports := make(map[string]*types2.Package) + _, err := Import(imports, "go/internal/gcimporter", ".", nil) + if err != nil { + t.Fatal(err) + } + + // look for go/types package + var goTypesPkg *types2.Package + for path, pkg := range imports { + if path == "go/types" { + goTypesPkg = pkg + break + } + } + if goTypesPkg == nil { + t.Fatal("go/types not found") + } + + // look for go/types.Object type + obj := lookupObj(t, goTypesPkg.Scope(), "Object") + typ, ok := obj.Type().(*types2.Named) + if !ok { + t.Fatalf("go/types.Object type is %v; wanted named type", typ) + } + + // lookup go/types.Object.Pkg method + m, index, indirect := types2.LookupFieldOrMethod(typ, false, nil, "Pkg") + if m == nil { + t.Fatalf("go/types.Object.Pkg not found (index = %v, indirect = %v)", index, indirect) + } + + // the method must belong to go/types + if m.Pkg().Path() != "go/types" { + t.Fatalf("found %v; want go/types", m.Pkg()) + } +} + +func TestIssue15517(t *testing.T) { + testenv.MustHaveGoBuild(t) + + // This package only handles gc export data. + if runtime.Compiler != "gc" { + t.Skipf("gc-built packages not available (compiler = %s)", runtime.Compiler) + } + + tmpdir := mktmpdir(t) + + compile(t, "testdata", "p.go", filepath.Join(tmpdir, "testdata"), nil) + + // Multiple imports of p must succeed without redeclaration errors. 
+ // We use an import path that's not cleaned up so that the eventual + // file path for the package is different from the package path; this + // will expose the error if it is present. + // + // (Issue: Both the textual and the binary importer used the file path + // of the package to be imported as key into the shared packages map. + // However, the binary importer then used the package path to identify + // the imported package to mark it as complete; effectively marking the + // wrong package as complete. By using an "unclean" package path, the + // file and package path are different, exposing the problem if present. + // The same issue occurs with vendoring.) + imports := make(map[string]*types2.Package) + for i := 0; i < 3; i++ { + if _, err := Import(imports, "./././testdata/p", tmpdir, nil); err != nil { + t.Fatal(err) + } + } +} + +func TestIssue15920(t *testing.T) { + testenv.MustHaveGoBuild(t) + + // This package only handles gc export data. + if runtime.Compiler != "gc" { + t.Skipf("gc-built packages not available (compiler = %s)", runtime.Compiler) + } + + compileAndImportPkg(t, "issue15920") +} + +func TestIssue20046(t *testing.T) { + testenv.MustHaveGoBuild(t) + + // This package only handles gc export data. + if runtime.Compiler != "gc" { + t.Skipf("gc-built packages not available (compiler = %s)", runtime.Compiler) + } + + // "./issue20046".V.M must exist + pkg := compileAndImportPkg(t, "issue20046") + obj := lookupObj(t, pkg.Scope(), "V") + if m, index, indirect := types2.LookupFieldOrMethod(obj.Type(), false, nil, "M"); m == nil { + t.Fatalf("V.M not found (index = %v, indirect = %v)", index, indirect) + } +} +func TestIssue25301(t *testing.T) { + testenv.MustHaveGoBuild(t) + + // This package only handles gc export data. + if runtime.Compiler != "gc" { + t.Skipf("gc-built packages not available (compiler = %s)", runtime.Compiler) + } + + compileAndImportPkg(t, "issue25301") +} + +func TestIssue25596(t *testing.T) { + testenv.MustHaveGoBuild(t) + + // This package only handles gc export data. + if runtime.Compiler != "gc" { + t.Skipf("gc-built packages not available (compiler = %s)", runtime.Compiler) + } + + compileAndImportPkg(t, "issue25596") +} + +func importPkg(t *testing.T, path, srcDir string) *types2.Package { + pkg, err := Import(make(map[string]*types2.Package), path, srcDir, nil) + if err != nil { + t.Helper() + t.Fatal(err) + } + return pkg +} + +func compileAndImportPkg(t *testing.T, name string) *types2.Package { + t.Helper() + tmpdir := mktmpdir(t) + compile(t, "testdata", name+".go", filepath.Join(tmpdir, "testdata"), nil) + return importPkg(t, "./testdata/"+name, tmpdir) +} + +func lookupObj(t *testing.T, scope *types2.Scope, name string) types2.Object { + if obj := scope.Lookup(name); obj != nil { + return obj + } + t.Helper() + t.Fatalf("%s not found", name) + return nil +} diff --git a/src/cmd/compile/internal/importer/iimport.go b/src/cmd/compile/internal/importer/iimport.go new file mode 100644 index 0000000..4981347 --- /dev/null +++ b/src/cmd/compile/internal/importer/iimport.go @@ -0,0 +1,793 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Indexed package import. +// See cmd/compile/internal/typecheck/iexport.go for the export data format. 
+ +package importer + +import ( + "cmd/compile/internal/syntax" + "cmd/compile/internal/typecheck" + "cmd/compile/internal/types2" + "encoding/binary" + "fmt" + "go/constant" + "go/token" + "io" + "math/big" + "sort" + "strings" +) + +type intReader struct { + *strings.Reader + path string +} + +func (r *intReader) int64() int64 { + i, err := binary.ReadVarint(r.Reader) + if err != nil { + errorf("import %q: read varint error: %v", r.path, err) + } + return i +} + +func (r *intReader) uint64() uint64 { + i, err := binary.ReadUvarint(r.Reader) + if err != nil { + errorf("import %q: read varint error: %v", r.path, err) + } + return i +} + +// Keep this in sync with constants in iexport.go. +const ( + iexportVersionGo1_11 = 0 + iexportVersionPosCol = 1 + iexportVersionGenerics = 2 + iexportVersionGo1_18 = 2 + + iexportVersionCurrent = 2 +) + +type ident struct { + pkg *types2.Package + name string +} + +const predeclReserved = 32 + +type itag uint64 + +const ( + // Types + definedType itag = iota + pointerType + sliceType + arrayType + chanType + mapType + signatureType + structType + interfaceType + typeParamType + instanceType + unionType +) + +// ImportData imports a package from the serialized package data +// and returns the number of bytes consumed and a reference to the package. +// If the export data version is not recognized or the format is otherwise +// compromised, an error is returned. +func ImportData(imports map[string]*types2.Package, data, path string) (pkg *types2.Package, err error) { + const currentVersion = iexportVersionCurrent + version := int64(-1) + defer func() { + if e := recover(); e != nil { + if version > currentVersion { + err = fmt.Errorf("cannot import %q (%v), export data is newer version - update tool", path, e) + } else { + err = fmt.Errorf("cannot import %q (%v), possibly version skew - reinstall package", path, e) + } + } + }() + + r := &intReader{strings.NewReader(data), path} + + version = int64(r.uint64()) + switch version { + case iexportVersionGo1_18, iexportVersionPosCol, iexportVersionGo1_11: + default: + errorf("unknown iexport format version %d", version) + } + + sLen := int64(r.uint64()) + dLen := int64(r.uint64()) + + whence, _ := r.Seek(0, io.SeekCurrent) + stringData := data[whence : whence+sLen] + declData := data[whence+sLen : whence+sLen+dLen] + r.Seek(sLen+dLen, io.SeekCurrent) + + p := iimporter{ + exportVersion: version, + ipath: path, + version: int(version), + + stringData: stringData, + pkgCache: make(map[uint64]*types2.Package), + posBaseCache: make(map[uint64]*syntax.PosBase), + + declData: declData, + pkgIndex: make(map[*types2.Package]map[string]uint64), + typCache: make(map[uint64]types2.Type), + // Separate map for typeparams, keyed by their package and unique + // name (name with subscript). + tparamIndex: make(map[ident]*types2.TypeParam), + } + + for i, pt := range predeclared { + p.typCache[uint64(i)] = pt + } + + pkgList := make([]*types2.Package, r.uint64()) + for i := range pkgList { + pkgPathOff := r.uint64() + pkgPath := p.stringAt(pkgPathOff) + pkgName := p.stringAt(r.uint64()) + _ = int(r.uint64()) // was package height, but not necessary anymore. 
+ + if pkgPath == "" { + pkgPath = path + } + pkg := imports[pkgPath] + if pkg == nil { + pkg = types2.NewPackage(pkgPath, pkgName) + imports[pkgPath] = pkg + } else { + if pkg.Name() != pkgName { + errorf("conflicting names %s and %s for package %q", pkg.Name(), pkgName, path) + } + } + + p.pkgCache[pkgPathOff] = pkg + + nameIndex := make(map[string]uint64) + for nSyms := r.uint64(); nSyms > 0; nSyms-- { + name := p.stringAt(r.uint64()) + nameIndex[name] = r.uint64() + } + + p.pkgIndex[pkg] = nameIndex + pkgList[i] = pkg + } + + localpkg := pkgList[0] + + names := make([]string, 0, len(p.pkgIndex[localpkg])) + for name := range p.pkgIndex[localpkg] { + names = append(names, name) + } + sort.Strings(names) + for _, name := range names { + p.doDecl(localpkg, name) + } + + // SetConstraint can't be called if the constraint type is not yet complete. + // When type params are created in the 'P' case of (*importReader).obj(), + // the associated constraint type may not be complete due to recursion. + // Therefore, we defer calling SetConstraint there, and call it here instead + // after all types are complete. + for _, d := range p.later { + d.t.SetConstraint(d.constraint) + } + // record all referenced packages as imports + list := append(([]*types2.Package)(nil), pkgList[1:]...) + sort.Sort(byPath(list)) + localpkg.SetImports(list) + + // package was imported completely and without errors + localpkg.MarkComplete() + + return localpkg, nil +} + +type setConstraintArgs struct { + t *types2.TypeParam + constraint types2.Type +} + +type iimporter struct { + exportVersion int64 + ipath string + version int + + stringData string + pkgCache map[uint64]*types2.Package + posBaseCache map[uint64]*syntax.PosBase + + declData string + pkgIndex map[*types2.Package]map[string]uint64 + typCache map[uint64]types2.Type + tparamIndex map[ident]*types2.TypeParam + + interfaceList []*types2.Interface + + // Arguments for calls to SetConstraint that are deferred due to recursive types + later []setConstraintArgs +} + +func (p *iimporter) doDecl(pkg *types2.Package, name string) { + // See if we've already imported this declaration. 
+ if obj := pkg.Scope().Lookup(name); obj != nil { + return + } + + off, ok := p.pkgIndex[pkg][name] + if !ok { + errorf("%v.%v not in index", pkg, name) + } + + r := &importReader{p: p, currPkg: pkg} + r.declReader.Reset(p.declData[off:]) + + r.obj(name) +} + +func (p *iimporter) stringAt(off uint64) string { + var x [binary.MaxVarintLen64]byte + n := copy(x[:], p.stringData[off:]) + + slen, n := binary.Uvarint(x[:n]) + if n <= 0 { + errorf("varint failed") + } + spos := off + uint64(n) + return p.stringData[spos : spos+slen] +} + +func (p *iimporter) pkgAt(off uint64) *types2.Package { + if pkg, ok := p.pkgCache[off]; ok { + return pkg + } + path := p.stringAt(off) + errorf("missing package %q in %q", path, p.ipath) + return nil +} + +func (p *iimporter) posBaseAt(off uint64) *syntax.PosBase { + if posBase, ok := p.posBaseCache[off]; ok { + return posBase + } + filename := p.stringAt(off) + posBase := syntax.NewTrimmedFileBase(filename, true) + p.posBaseCache[off] = posBase + return posBase +} + +func (p *iimporter) typAt(off uint64, base *types2.Named) types2.Type { + if t, ok := p.typCache[off]; ok && canReuse(base, t) { + return t + } + + if off < predeclReserved { + errorf("predeclared type missing from cache: %v", off) + } + + r := &importReader{p: p} + r.declReader.Reset(p.declData[off-predeclReserved:]) + t := r.doType(base) + + if canReuse(base, t) { + p.typCache[off] = t + } + return t +} + +// canReuse reports whether the type rhs on the RHS of the declaration for def +// may be re-used. +// +// Specifically, if def is non-nil and rhs is an interface type with methods, it +// may not be re-used because we have a convention of setting the receiver type +// for interface methods to def. +func canReuse(def *types2.Named, rhs types2.Type) bool { + if def == nil { + return true + } + iface, _ := rhs.(*types2.Interface) + if iface == nil { + return true + } + // Don't use iface.Empty() here as iface may not be complete. + return iface.NumEmbeddeds() == 0 && iface.NumExplicitMethods() == 0 +} + +type importReader struct { + p *iimporter + declReader strings.Reader + currPkg *types2.Package + prevPosBase *syntax.PosBase + prevLine int64 + prevColumn int64 +} + +func (r *importReader) obj(name string) { + tag := r.byte() + pos := r.pos() + + switch tag { + case 'A': + typ := r.typ() + + r.declare(types2.NewTypeName(pos, r.currPkg, name, typ)) + + case 'C': + typ, val := r.value() + + r.declare(types2.NewConst(pos, r.currPkg, name, typ, val)) + + case 'F', 'G': + var tparams []*types2.TypeParam + if tag == 'G' { + tparams = r.tparamList() + } + sig := r.signature(nil, nil, tparams) + r.declare(types2.NewFunc(pos, r.currPkg, name, sig)) + + case 'T', 'U': + // Types can be recursive. We need to setup a stub + // declaration before recursing. + obj := types2.NewTypeName(pos, r.currPkg, name, nil) + named := types2.NewNamed(obj, nil, nil) + // Declare obj before calling r.tparamList, so the new type name is recognized + // if used in the constraint of one of its own typeparams (see #48280). + r.declare(obj) + if tag == 'U' { + tparams := r.tparamList() + named.SetTypeParams(tparams) + } + + underlying := r.p.typAt(r.uint64(), named).Underlying() + named.SetUnderlying(underlying) + + if !isInterface(underlying) { + for n := r.uint64(); n > 0; n-- { + mpos := r.pos() + mname := r.ident() + recv := r.param() + + // If the receiver has any targs, set those as the + // rparams of the method (since those are the + // typeparams being used in the method sig/body). 
+ targs := baseType(recv.Type()).TypeArgs() + var rparams []*types2.TypeParam + if targs.Len() > 0 { + rparams = make([]*types2.TypeParam, targs.Len()) + for i := range rparams { + rparams[i], _ = targs.At(i).(*types2.TypeParam) + } + } + msig := r.signature(recv, rparams, nil) + + named.AddMethod(types2.NewFunc(mpos, r.currPkg, mname, msig)) + } + } + + case 'P': + // We need to "declare" a typeparam in order to have a name that + // can be referenced recursively (if needed) in the type param's + // bound. + if r.p.exportVersion < iexportVersionGenerics { + errorf("unexpected type param type") + } + name0 := typecheck.TparamName(name) + if name0 == "" { + errorf("malformed type parameter export name %s: missing prefix", name) + } + + tn := types2.NewTypeName(pos, r.currPkg, name0, nil) + t := types2.NewTypeParam(tn, nil) + // To handle recursive references to the typeparam within its + // bound, save the partial type in tparamIndex before reading the bounds. + id := ident{r.currPkg, name} + r.p.tparamIndex[id] = t + + var implicit bool + if r.p.exportVersion >= iexportVersionGo1_18 { + implicit = r.bool() + } + constraint := r.typ() + if implicit { + iface, _ := constraint.(*types2.Interface) + if iface == nil { + errorf("non-interface constraint marked implicit") + } + iface.MarkImplicit() + } + // The constraint type may not be complete, if we + // are in the middle of a type recursion involving type + // constraints. So, we defer SetConstraint until we have + // completely set up all types in ImportData. + r.p.later = append(r.p.later, setConstraintArgs{t: t, constraint: constraint}) + + case 'V': + typ := r.typ() + + r.declare(types2.NewVar(pos, r.currPkg, name, typ)) + + default: + errorf("unexpected tag: %v", tag) + } +} + +func (r *importReader) declare(obj types2.Object) { + obj.Pkg().Scope().Insert(obj) +} + +func (r *importReader) value() (typ types2.Type, val constant.Value) { + typ = r.typ() + if r.p.exportVersion >= iexportVersionGo1_18 { + // TODO: add support for using the kind + _ = constant.Kind(r.int64()) + } + + switch b := typ.Underlying().(*types2.Basic); b.Info() & types2.IsConstType { + case types2.IsBoolean: + val = constant.MakeBool(r.bool()) + + case types2.IsString: + val = constant.MakeString(r.string()) + + case types2.IsInteger: + var x big.Int + r.mpint(&x, b) + val = constant.Make(&x) + + case types2.IsFloat: + val = r.mpfloat(b) + + case types2.IsComplex: + re := r.mpfloat(b) + im := r.mpfloat(b) + val = constant.BinaryOp(re, token.ADD, constant.MakeImag(im)) + + default: + errorf("unexpected type %v", typ) // panics + panic("unreachable") + } + + return +} + +func intSize(b *types2.Basic) (signed bool, maxBytes uint) { + if (b.Info() & types2.IsUntyped) != 0 { + return true, 64 + } + + switch b.Kind() { + case types2.Float32, types2.Complex64: + return true, 3 + case types2.Float64, types2.Complex128: + return true, 7 + } + + signed = (b.Info() & types2.IsUnsigned) == 0 + switch b.Kind() { + case types2.Int8, types2.Uint8: + maxBytes = 1 + case types2.Int16, types2.Uint16: + maxBytes = 2 + case types2.Int32, types2.Uint32: + maxBytes = 4 + default: + maxBytes = 8 + } + + return +} + +func (r *importReader) mpint(x *big.Int, typ *types2.Basic) { + signed, maxBytes := intSize(typ) + + maxSmall := 256 - maxBytes + if signed { + maxSmall = 256 - 2*maxBytes + } + if maxBytes == 1 { + maxSmall = 256 + } + + n, _ := r.declReader.ReadByte() + if uint(n) < maxSmall { + v := int64(n) + if signed { + v >>= 1 + if n&1 != 0 { + v = ^v + } + } + x.SetInt64(v) + return 
+ } + + v := -n + if signed { + v = -(n &^ 1) >> 1 + } + if v < 1 || uint(v) > maxBytes { + errorf("weird decoding: %v, %v => %v", n, signed, v) + } + b := make([]byte, v) + io.ReadFull(&r.declReader, b) + x.SetBytes(b) + if signed && n&1 != 0 { + x.Neg(x) + } +} + +func (r *importReader) mpfloat(typ *types2.Basic) constant.Value { + var mant big.Int + r.mpint(&mant, typ) + var f big.Float + f.SetInt(&mant) + if f.Sign() != 0 { + f.SetMantExp(&f, int(r.int64())) + } + return constant.Make(&f) +} + +func (r *importReader) ident() string { + return r.string() +} + +func (r *importReader) qualifiedIdent() (*types2.Package, string) { + name := r.string() + pkg := r.pkg() + return pkg, name +} + +func (r *importReader) pos() syntax.Pos { + if r.p.version >= 1 { + r.posv1() + } else { + r.posv0() + } + + if (r.prevPosBase == nil || r.prevPosBase.Filename() == "") && r.prevLine == 0 && r.prevColumn == 0 { + return syntax.Pos{} + } + + return syntax.MakePos(r.prevPosBase, uint(r.prevLine), uint(r.prevColumn)) +} + +func (r *importReader) posv0() { + delta := r.int64() + if delta != deltaNewFile { + r.prevLine += delta + } else if l := r.int64(); l == -1 { + r.prevLine += deltaNewFile + } else { + r.prevPosBase = r.posBase() + r.prevLine = l + } +} + +func (r *importReader) posv1() { + delta := r.int64() + r.prevColumn += delta >> 1 + if delta&1 != 0 { + delta = r.int64() + r.prevLine += delta >> 1 + if delta&1 != 0 { + r.prevPosBase = r.posBase() + } + } +} + +func (r *importReader) typ() types2.Type { + return r.p.typAt(r.uint64(), nil) +} + +func isInterface(t types2.Type) bool { + _, ok := t.(*types2.Interface) + return ok +} + +func (r *importReader) pkg() *types2.Package { return r.p.pkgAt(r.uint64()) } +func (r *importReader) string() string { return r.p.stringAt(r.uint64()) } +func (r *importReader) posBase() *syntax.PosBase { return r.p.posBaseAt(r.uint64()) } + +func (r *importReader) doType(base *types2.Named) types2.Type { + switch k := r.kind(); k { + default: + errorf("unexpected kind tag in %q: %v", r.p.ipath, k) + return nil + + case definedType: + pkg, name := r.qualifiedIdent() + r.p.doDecl(pkg, name) + return pkg.Scope().Lookup(name).(*types2.TypeName).Type() + case pointerType: + return types2.NewPointer(r.typ()) + case sliceType: + return types2.NewSlice(r.typ()) + case arrayType: + n := r.uint64() + return types2.NewArray(r.typ(), int64(n)) + case chanType: + dir := chanDir(int(r.uint64())) + return types2.NewChan(dir, r.typ()) + case mapType: + return types2.NewMap(r.typ(), r.typ()) + case signatureType: + r.currPkg = r.pkg() + return r.signature(nil, nil, nil) + + case structType: + r.currPkg = r.pkg() + + fields := make([]*types2.Var, r.uint64()) + tags := make([]string, len(fields)) + for i := range fields { + fpos := r.pos() + fname := r.ident() + ftyp := r.typ() + emb := r.bool() + tag := r.string() + + fields[i] = types2.NewField(fpos, r.currPkg, fname, ftyp, emb) + tags[i] = tag + } + return types2.NewStruct(fields, tags) + + case interfaceType: + r.currPkg = r.pkg() + + embeddeds := make([]types2.Type, r.uint64()) + for i := range embeddeds { + _ = r.pos() + embeddeds[i] = r.typ() + } + + methods := make([]*types2.Func, r.uint64()) + for i := range methods { + mpos := r.pos() + mname := r.ident() + + // TODO(mdempsky): Matches bimport.go, but I + // don't agree with this. 
+ var recv *types2.Var + if base != nil { + recv = types2.NewVar(syntax.Pos{}, r.currPkg, "", base) + } + + msig := r.signature(recv, nil, nil) + methods[i] = types2.NewFunc(mpos, r.currPkg, mname, msig) + } + + typ := types2.NewInterfaceType(methods, embeddeds) + r.p.interfaceList = append(r.p.interfaceList, typ) + return typ + + case typeParamType: + if r.p.exportVersion < iexportVersionGenerics { + errorf("unexpected type param type") + } + pkg, name := r.qualifiedIdent() + id := ident{pkg, name} + if t, ok := r.p.tparamIndex[id]; ok { + // We're already in the process of importing this typeparam. + return t + } + // Otherwise, import the definition of the typeparam now. + r.p.doDecl(pkg, name) + return r.p.tparamIndex[id] + + case instanceType: + if r.p.exportVersion < iexportVersionGenerics { + errorf("unexpected instantiation type") + } + // pos does not matter for instances: they are positioned on the original + // type. + _ = r.pos() + len := r.uint64() + targs := make([]types2.Type, len) + for i := range targs { + targs[i] = r.typ() + } + baseType := r.typ() + // The imported instantiated type doesn't include any methods, so + // we must always use the methods of the base (orig) type. + // TODO provide a non-nil *Context + t, _ := types2.Instantiate(nil, baseType, targs, false) + return t + + case unionType: + if r.p.exportVersion < iexportVersionGenerics { + errorf("unexpected instantiation type") + } + terms := make([]*types2.Term, r.uint64()) + for i := range terms { + terms[i] = types2.NewTerm(r.bool(), r.typ()) + } + return types2.NewUnion(terms) + } +} + +func (r *importReader) kind() itag { + return itag(r.uint64()) +} + +func (r *importReader) signature(recv *types2.Var, rparams, tparams []*types2.TypeParam) *types2.Signature { + params := r.paramList() + results := r.paramList() + variadic := params.Len() > 0 && r.bool() + return types2.NewSignatureType(recv, rparams, tparams, params, results, variadic) +} + +func (r *importReader) tparamList() []*types2.TypeParam { + n := r.uint64() + if n == 0 { + return nil + } + xs := make([]*types2.TypeParam, n) + for i := range xs { + xs[i] = r.typ().(*types2.TypeParam) + } + return xs +} + +func (r *importReader) paramList() *types2.Tuple { + xs := make([]*types2.Var, r.uint64()) + for i := range xs { + xs[i] = r.param() + } + return types2.NewTuple(xs...) 
+} + +func (r *importReader) param() *types2.Var { + pos := r.pos() + name := r.ident() + typ := r.typ() + return types2.NewParam(pos, r.currPkg, name, typ) +} + +func (r *importReader) bool() bool { + return r.uint64() != 0 +} + +func (r *importReader) int64() int64 { + n, err := binary.ReadVarint(&r.declReader) + if err != nil { + errorf("readVarint: %v", err) + } + return n +} + +func (r *importReader) uint64() uint64 { + n, err := binary.ReadUvarint(&r.declReader) + if err != nil { + errorf("readUvarint: %v", err) + } + return n +} + +func (r *importReader) byte() byte { + x, err := r.declReader.ReadByte() + if err != nil { + errorf("declReader.ReadByte: %v", err) + } + return x +} + +func baseType(typ types2.Type) *types2.Named { + // pointer receivers are never types2.Named types + if p, _ := typ.(*types2.Pointer); p != nil { + typ = p.Elem() + } + // receiver base types are always (possibly generic) types2.Named types + n, _ := typ.(*types2.Named) + return n +} diff --git a/src/cmd/compile/internal/importer/support.go b/src/cmd/compile/internal/importer/support.go new file mode 100644 index 0000000..5810f5e --- /dev/null +++ b/src/cmd/compile/internal/importer/support.go @@ -0,0 +1,152 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This file implements support functionality for iimport.go. + +package importer + +import ( + "cmd/compile/internal/base" + "cmd/compile/internal/types2" + "fmt" + "go/token" + "internal/pkgbits" + "sync" +) + +func assert(p bool) { + base.Assert(p) +} + +func errorf(format string, args ...interface{}) { + panic(fmt.Sprintf(format, args...)) +} + +const deltaNewFile = -64 // see cmd/compile/internal/gc/bexport.go + +// Synthesize a token.Pos +type fakeFileSet struct { + fset *token.FileSet + files map[string]*token.File +} + +func (s *fakeFileSet) pos(file string, line, column int) token.Pos { + // TODO(mdempsky): Make use of column. + + // Since we don't know the set of needed file positions, we + // reserve maxlines positions per file. + const maxlines = 64 * 1024 + f := s.files[file] + if f == nil { + f = s.fset.AddFile(file, -1, maxlines) + s.files[file] = f + // Allocate the fake linebreak indices on first use. + // TODO(adonovan): opt: save ~512KB using a more complex scheme? + fakeLinesOnce.Do(func() { + fakeLines = make([]int, maxlines) + for i := range fakeLines { + fakeLines[i] = i + } + }) + f.SetLines(fakeLines) + } + + if line > maxlines { + line = 1 + } + + // Treat the file as if it contained only newlines + // and column=1: use the line number as the offset. 
+ return f.Pos(line - 1) +} + +var ( + fakeLines []int + fakeLinesOnce sync.Once +) + +func chanDir(d int) types2.ChanDir { + // tag values must match the constants in cmd/compile/internal/gc/go.go + switch d { + case 1 /* Crecv */ : + return types2.RecvOnly + case 2 /* Csend */ : + return types2.SendOnly + case 3 /* Cboth */ : + return types2.SendRecv + default: + errorf("unexpected channel dir %d", d) + return 0 + } +} + +var predeclared = []types2.Type{ + // basic types + types2.Typ[types2.Bool], + types2.Typ[types2.Int], + types2.Typ[types2.Int8], + types2.Typ[types2.Int16], + types2.Typ[types2.Int32], + types2.Typ[types2.Int64], + types2.Typ[types2.Uint], + types2.Typ[types2.Uint8], + types2.Typ[types2.Uint16], + types2.Typ[types2.Uint32], + types2.Typ[types2.Uint64], + types2.Typ[types2.Uintptr], + types2.Typ[types2.Float32], + types2.Typ[types2.Float64], + types2.Typ[types2.Complex64], + types2.Typ[types2.Complex128], + types2.Typ[types2.String], + + // basic type aliases + types2.Universe.Lookup("byte").Type(), + types2.Universe.Lookup("rune").Type(), + + // error + types2.Universe.Lookup("error").Type(), + + // untyped types + types2.Typ[types2.UntypedBool], + types2.Typ[types2.UntypedInt], + types2.Typ[types2.UntypedRune], + types2.Typ[types2.UntypedFloat], + types2.Typ[types2.UntypedComplex], + types2.Typ[types2.UntypedString], + types2.Typ[types2.UntypedNil], + + // package unsafe + types2.Typ[types2.UnsafePointer], + + // invalid type + types2.Typ[types2.Invalid], // only appears in packages with errors + + // used internally by gc; never used by this package or in .a files + // not to be confused with the universe any + anyType{}, + + // comparable + types2.Universe.Lookup("comparable").Type(), + + // any + types2.Universe.Lookup("any").Type(), +} + +type anyType struct{} + +func (t anyType) Underlying() types2.Type { return t } +func (t anyType) String() string { return "any" } + +// See cmd/compile/internal/noder.derivedInfo. +type derivedInfo struct { + idx pkgbits.Index + needed bool +} + +// See cmd/compile/internal/noder.typeInfo. +type typeInfo struct { + idx pkgbits.Index + derived bool +} diff --git a/src/cmd/compile/internal/importer/testdata/a.go b/src/cmd/compile/internal/importer/testdata/a.go new file mode 100644 index 0000000..56e4292 --- /dev/null +++ b/src/cmd/compile/internal/importer/testdata/a.go @@ -0,0 +1,14 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Input for TestIssue13566 + +package a + +import "encoding/json" + +type A struct { + a *A + json json.RawMessage +} diff --git a/src/cmd/compile/internal/importer/testdata/b.go b/src/cmd/compile/internal/importer/testdata/b.go new file mode 100644 index 0000000..4196678 --- /dev/null +++ b/src/cmd/compile/internal/importer/testdata/b.go @@ -0,0 +1,11 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Input for TestIssue13566 + +package b + +import "./a" + +type A a.A diff --git a/src/cmd/compile/internal/importer/testdata/exports.go b/src/cmd/compile/internal/importer/testdata/exports.go new file mode 100644 index 0000000..91598c0 --- /dev/null +++ b/src/cmd/compile/internal/importer/testdata/exports.go @@ -0,0 +1,91 @@ +// Copyright 2011 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This file is used to generate an object file which +// serves as test file for gcimporter_test.go. + +package exports + +import "go/ast" + +// Issue 3682: Correctly read dotted identifiers from export data. +const init1 = 0 + +func init() {} + +const ( + C0 int = 0 + C1 = 3.14159265 + C2 = 2.718281828i + C3 = -123.456e-789 + C4 = +123.456e+789 + C5 = 1234i + C6 = "foo\n" + C7 = `bar\n` + C8 = 42 + C9 int = 42 + C10 float64 = 42 +) + +type ( + T1 int + T2 [10]int + T3 []int + T4 *int + T5 chan int + T6a chan<- int + T6b chan (<-chan int) + T6c chan<- (chan int) + T7 <-chan *ast.File + T8 struct{} + T9 struct { + a int + b, c float32 + d []string `go:"tag"` + } + T10 struct { + T8 + T9 + _ *T10 + } + T11 map[int]string + T12 interface{} + T13 interface { + m1() + m2(int) float32 + } + T14 interface { + T12 + T13 + m3(x ...struct{}) []T9 + } + T15 func() + T16 func(int) + T17 func(x int) + T18 func() float32 + T19 func() (x float32) + T20 func(...interface{}) + T21 struct{ next *T21 } + T22 struct{ link *T23 } + T23 struct{ link *T22 } + T24 *T24 + T25 *T26 + T26 *T27 + T27 *T25 + T28 func(T28) T28 +) + +var ( + V0 int + V1 = -991.0 + V2 float32 = 1.2 +) + +func F1() {} +func F2(x int) {} +func F3() int { return 0 } +func F4() float32 { return 0 } +func F5(a, b, c int, u, v, w struct{ x, y T1 }, more ...interface{}) (p, q, r chan<- T10) + +func (p *T1) M1() diff --git a/src/cmd/compile/internal/importer/testdata/generics.go b/src/cmd/compile/internal/importer/testdata/generics.go new file mode 100644 index 0000000..00bf040 --- /dev/null +++ b/src/cmd/compile/internal/importer/testdata/generics.go @@ -0,0 +1,29 @@ +// Copyright 2021 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This file is used to generate an object file which +// serves as test file for gcimporter_test.go. + +package generics + +type Any any + +var x any + +type T[A, B any] struct { + Left A + Right B +} + +var X T[int, string] = T[int, string]{1, "hi"} + +func ToInt[P interface{ ~int }](p P) int { return int(p) } + +var IntID = ToInt[int] + +type G[C comparable] int + +func ImplicitFunc[T ~int]() {} + +type ImplicitType[T ~int] int diff --git a/src/cmd/compile/internal/importer/testdata/issue15920.go b/src/cmd/compile/internal/importer/testdata/issue15920.go new file mode 100644 index 0000000..c70f7d8 --- /dev/null +++ b/src/cmd/compile/internal/importer/testdata/issue15920.go @@ -0,0 +1,11 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package p + +// The underlying type of Error is the underlying type of error. +// Make sure we can import this again without problems. +type Error error + +func F() Error { return nil } diff --git a/src/cmd/compile/internal/importer/testdata/issue20046.go b/src/cmd/compile/internal/importer/testdata/issue20046.go new file mode 100644 index 0000000..c63ee82 --- /dev/null +++ b/src/cmd/compile/internal/importer/testdata/issue20046.go @@ -0,0 +1,9 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package p + +var V interface { + M() +} diff --git a/src/cmd/compile/internal/importer/testdata/issue25301.go b/src/cmd/compile/internal/importer/testdata/issue25301.go new file mode 100644 index 0000000..e3dc98b --- /dev/null +++ b/src/cmd/compile/internal/importer/testdata/issue25301.go @@ -0,0 +1,17 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package issue25301 + +type ( + A = interface { + M() + } + T interface { + A + } + S struct{} +) + +func (S) M() { println("m") } diff --git a/src/cmd/compile/internal/importer/testdata/issue25596.go b/src/cmd/compile/internal/importer/testdata/issue25596.go new file mode 100644 index 0000000..8923373 --- /dev/null +++ b/src/cmd/compile/internal/importer/testdata/issue25596.go @@ -0,0 +1,13 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package issue25596 + +type E interface { + M() T +} + +type T interface { + E +} diff --git a/src/cmd/compile/internal/importer/testdata/p.go b/src/cmd/compile/internal/importer/testdata/p.go new file mode 100644 index 0000000..9e2e705 --- /dev/null +++ b/src/cmd/compile/internal/importer/testdata/p.go @@ -0,0 +1,13 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Input for TestIssue15517 + +package p + +const C = 0 + +var V int + +func F() {} diff --git a/src/cmd/compile/internal/importer/testdata/versions/test.go b/src/cmd/compile/internal/importer/testdata/versions/test.go new file mode 100644 index 0000000..227fc09 --- /dev/null +++ b/src/cmd/compile/internal/importer/testdata/versions/test.go @@ -0,0 +1,28 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// To create a test case for a new export format version, +// build this package with the latest compiler and store +// the resulting .a file appropriately named in the versions +// directory. The VersionHandling test will pick it up. +// +// In the testdata/versions: +// +// go build -o test_go1.$X_$Y.a test.go +// +// with $X = Go version and $Y = export format version +// (add 'b' or 'i' to distinguish between binary and +// indexed format starting with 1.11 as long as both +// formats are supported). +// +// Make sure this source is extended such that it exercises +// whatever export format change has taken place. + +package test + +// Any release before and including Go 1.7 didn't encode +// the package for a blank struct field. +type BlankField struct { + _ int +} diff --git a/src/cmd/compile/internal/importer/ureader.go b/src/cmd/compile/internal/importer/ureader.go new file mode 100644 index 0000000..f5c2f41 --- /dev/null +++ b/src/cmd/compile/internal/importer/ureader.go @@ -0,0 +1,535 @@ +// Copyright 2021 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package importer + +import ( + "cmd/compile/internal/base" + "cmd/compile/internal/syntax" + "cmd/compile/internal/types2" + "cmd/internal/src" + "internal/pkgbits" +) + +type pkgReader struct { + pkgbits.PkgDecoder + + ctxt *types2.Context + imports map[string]*types2.Package + + posBases []*syntax.PosBase + pkgs []*types2.Package + typs []types2.Type +} + +func ReadPackage(ctxt *types2.Context, imports map[string]*types2.Package, input pkgbits.PkgDecoder) *types2.Package { + pr := pkgReader{ + PkgDecoder: input, + + ctxt: ctxt, + imports: imports, + + posBases: make([]*syntax.PosBase, input.NumElems(pkgbits.RelocPosBase)), + pkgs: make([]*types2.Package, input.NumElems(pkgbits.RelocPkg)), + typs: make([]types2.Type, input.NumElems(pkgbits.RelocType)), + } + + r := pr.newReader(pkgbits.RelocMeta, pkgbits.PublicRootIdx, pkgbits.SyncPublic) + pkg := r.pkg() + r.Bool() // TODO(mdempsky): Remove; was "has init" + + for i, n := 0, r.Len(); i < n; i++ { + // As if r.obj(), but avoiding the Scope.Lookup call, + // to avoid eager loading of imports. + r.Sync(pkgbits.SyncObject) + assert(!r.Bool()) + r.p.objIdx(r.Reloc(pkgbits.RelocObj)) + assert(r.Len() == 0) + } + + r.Sync(pkgbits.SyncEOF) + + pkg.MarkComplete() + return pkg +} + +type reader struct { + pkgbits.Decoder + + p *pkgReader + + dict *readerDict +} + +type readerDict struct { + bounds []typeInfo + + tparams []*types2.TypeParam + + derived []derivedInfo + derivedTypes []types2.Type +} + +type readerTypeBound struct { + derived bool + boundIdx int +} + +func (pr *pkgReader) newReader(k pkgbits.RelocKind, idx pkgbits.Index, marker pkgbits.SyncMarker) *reader { + return &reader{ + Decoder: pr.NewDecoder(k, idx, marker), + p: pr, + } +} + +func (pr *pkgReader) tempReader(k pkgbits.RelocKind, idx pkgbits.Index, marker pkgbits.SyncMarker) *reader { + return &reader{ + Decoder: pr.TempDecoder(k, idx, marker), + p: pr, + } +} + +func (pr *pkgReader) retireReader(r *reader) { + pr.RetireDecoder(&r.Decoder) +} + +// @@@ Positions + +func (r *reader) pos() syntax.Pos { + r.Sync(pkgbits.SyncPos) + if !r.Bool() { + return syntax.Pos{} + } + + // TODO(mdempsky): Delta encoding. + posBase := r.posBase() + line := r.Uint() + col := r.Uint() + return syntax.MakePos(posBase, line, col) +} + +func (r *reader) posBase() *syntax.PosBase { + return r.p.posBaseIdx(r.Reloc(pkgbits.RelocPosBase)) +} + +func (pr *pkgReader) posBaseIdx(idx pkgbits.Index) *syntax.PosBase { + if b := pr.posBases[idx]; b != nil { + return b + } + var b *syntax.PosBase + { + r := pr.tempReader(pkgbits.RelocPosBase, idx, pkgbits.SyncPosBase) + + filename := r.String() + + if r.Bool() { + b = syntax.NewTrimmedFileBase(filename, true) + } else { + pos := r.pos() + line := r.Uint() + col := r.Uint() + b = syntax.NewLineBase(pos, filename, true, line, col) + } + pr.retireReader(r) + } + + pr.posBases[idx] = b + return b +} + +// @@@ Packages + +func (r *reader) pkg() *types2.Package { + r.Sync(pkgbits.SyncPkg) + return r.p.pkgIdx(r.Reloc(pkgbits.RelocPkg)) +} + +func (pr *pkgReader) pkgIdx(idx pkgbits.Index) *types2.Package { + // TODO(mdempsky): Consider using some non-nil pointer to indicate + // the universe scope, so we don't need to keep re-reading it. 
+ if pkg := pr.pkgs[idx]; pkg != nil { + return pkg + } + + pkg := pr.newReader(pkgbits.RelocPkg, idx, pkgbits.SyncPkgDef).doPkg() + pr.pkgs[idx] = pkg + return pkg +} + +func (r *reader) doPkg() *types2.Package { + path := r.String() + switch path { + case "": + path = r.p.PkgPath() + case "builtin": + return nil // universe + case "unsafe": + return types2.Unsafe + } + + if pkg := r.p.imports[path]; pkg != nil { + return pkg + } + + name := r.String() + pkg := types2.NewPackage(path, name) + r.p.imports[path] = pkg + + // TODO(mdempsky): The list of imported packages is important for + // go/types, but we could probably skip populating it for types2. + imports := make([]*types2.Package, r.Len()) + for i := range imports { + imports[i] = r.pkg() + } + pkg.SetImports(imports) + + return pkg +} + +// @@@ Types + +func (r *reader) typ() types2.Type { + return r.p.typIdx(r.typInfo(), r.dict) +} + +func (r *reader) typInfo() typeInfo { + r.Sync(pkgbits.SyncType) + if r.Bool() { + return typeInfo{idx: pkgbits.Index(r.Len()), derived: true} + } + return typeInfo{idx: r.Reloc(pkgbits.RelocType), derived: false} +} + +func (pr *pkgReader) typIdx(info typeInfo, dict *readerDict) types2.Type { + idx := info.idx + var where *types2.Type + if info.derived { + where = &dict.derivedTypes[idx] + idx = dict.derived[idx].idx + } else { + where = &pr.typs[idx] + } + + if typ := *where; typ != nil { + return typ + } + + var typ types2.Type + { + r := pr.tempReader(pkgbits.RelocType, idx, pkgbits.SyncTypeIdx) + r.dict = dict + + typ = r.doTyp() + assert(typ != nil) + pr.retireReader(r) + } + + // See comment in pkgReader.typIdx explaining how this happens. + if prev := *where; prev != nil { + return prev + } + + *where = typ + return typ +} + +func (r *reader) doTyp() (res types2.Type) { + switch tag := pkgbits.CodeType(r.Code(pkgbits.SyncType)); tag { + default: + base.FatalfAt(src.NoXPos, "unhandled type tag: %v", tag) + panic("unreachable") + + case pkgbits.TypeBasic: + return types2.Typ[r.Len()] + + case pkgbits.TypeNamed: + obj, targs := r.obj() + name := obj.(*types2.TypeName) + if len(targs) != 0 { + t, _ := types2.Instantiate(r.p.ctxt, name.Type(), targs, false) + return t + } + return name.Type() + + case pkgbits.TypeTypeParam: + return r.dict.tparams[r.Len()] + + case pkgbits.TypeArray: + len := int64(r.Uint64()) + return types2.NewArray(r.typ(), len) + case pkgbits.TypeChan: + dir := types2.ChanDir(r.Len()) + return types2.NewChan(dir, r.typ()) + case pkgbits.TypeMap: + return types2.NewMap(r.typ(), r.typ()) + case pkgbits.TypePointer: + return types2.NewPointer(r.typ()) + case pkgbits.TypeSignature: + return r.signature(nil, nil, nil) + case pkgbits.TypeSlice: + return types2.NewSlice(r.typ()) + case pkgbits.TypeStruct: + return r.structType() + case pkgbits.TypeInterface: + return r.interfaceType() + case pkgbits.TypeUnion: + return r.unionType() + } +} + +func (r *reader) structType() *types2.Struct { + fields := make([]*types2.Var, r.Len()) + var tags []string + for i := range fields { + pos := r.pos() + pkg, name := r.selector() + ftyp := r.typ() + tag := r.String() + embedded := r.Bool() + + fields[i] = types2.NewField(pos, pkg, name, ftyp, embedded) + if tag != "" { + for len(tags) < i { + tags = append(tags, "") + } + tags = append(tags, tag) + } + } + return types2.NewStruct(fields, tags) +} + +func (r *reader) unionType() *types2.Union { + terms := make([]*types2.Term, r.Len()) + for i := range terms { + terms[i] = types2.NewTerm(r.Bool(), r.typ()) + } + return types2.NewUnion(terms) +} + 
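The structType decoder just above only grows its tags slice when it meets a non-empty tag, back-filling empty strings up to the current field index. That works because the struct constructor accepts a tags slice shorter than the fields slice, treating missing entries as empty tags. Below is a minimal sketch of that convention, not part of the upstream file: it uses the exported go/types API (types2 is compiler-internal and mirrors it), and the package path and field names are invented for illustration.

package main

import (
	"fmt"
	"go/token"
	"go/types"
)

func main() {
	pkg := types.NewPackage("example.com/demo", "demo")
	fields := []*types.Var{
		types.NewField(token.NoPos, pkg, "A", types.Typ[types.Int], false),
		types.NewField(token.NoPos, pkg, "B", types.Typ[types.String], false),
		types.NewField(token.NoPos, pkg, "C", types.Typ[types.Bool], false),
	}
	// Only field B carries a tag, so the tags slice stops at index 1;
	// the missing entry for C is read back as "".
	s := types.NewStruct(fields, []string{"", `json:"b"`})
	fmt.Println(s.Tag(1)) // json:"b"
	fmt.Println(s.Tag(2)) // empty string: no tag recorded for C
}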
+func (r *reader) interfaceType() *types2.Interface { + methods := make([]*types2.Func, r.Len()) + embeddeds := make([]types2.Type, r.Len()) + implicit := len(methods) == 0 && len(embeddeds) == 1 && r.Bool() + + for i := range methods { + pos := r.pos() + pkg, name := r.selector() + mtyp := r.signature(nil, nil, nil) + methods[i] = types2.NewFunc(pos, pkg, name, mtyp) + } + + for i := range embeddeds { + embeddeds[i] = r.typ() + } + + iface := types2.NewInterfaceType(methods, embeddeds) + if implicit { + iface.MarkImplicit() + } + return iface +} + +func (r *reader) signature(recv *types2.Var, rtparams, tparams []*types2.TypeParam) *types2.Signature { + r.Sync(pkgbits.SyncSignature) + + params := r.params() + results := r.params() + variadic := r.Bool() + + return types2.NewSignatureType(recv, rtparams, tparams, params, results, variadic) +} + +func (r *reader) params() *types2.Tuple { + r.Sync(pkgbits.SyncParams) + params := make([]*types2.Var, r.Len()) + for i := range params { + params[i] = r.param() + } + return types2.NewTuple(params...) +} + +func (r *reader) param() *types2.Var { + r.Sync(pkgbits.SyncParam) + + pos := r.pos() + pkg, name := r.localIdent() + typ := r.typ() + + return types2.NewParam(pos, pkg, name, typ) +} + +// @@@ Objects + +func (r *reader) obj() (types2.Object, []types2.Type) { + r.Sync(pkgbits.SyncObject) + + assert(!r.Bool()) + + pkg, name := r.p.objIdx(r.Reloc(pkgbits.RelocObj)) + obj := pkg.Scope().Lookup(name) + + targs := make([]types2.Type, r.Len()) + for i := range targs { + targs[i] = r.typ() + } + + return obj, targs +} + +func (pr *pkgReader) objIdx(idx pkgbits.Index) (*types2.Package, string) { + var objPkg *types2.Package + var objName string + var tag pkgbits.CodeObj + { + rname := pr.tempReader(pkgbits.RelocName, idx, pkgbits.SyncObject1) + + objPkg, objName = rname.qualifiedIdent() + assert(objName != "") + + tag = pkgbits.CodeObj(rname.Code(pkgbits.SyncCodeObj)) + pr.retireReader(rname) + } + + if tag == pkgbits.ObjStub { + base.Assertf(objPkg == nil || objPkg == types2.Unsafe, "unexpected stub package: %v", objPkg) + return objPkg, objName + } + + objPkg.Scope().InsertLazy(objName, func() types2.Object { + dict := pr.objDictIdx(idx) + + r := pr.newReader(pkgbits.RelocObj, idx, pkgbits.SyncObject1) + r.dict = dict + + switch tag { + default: + panic("weird") + + case pkgbits.ObjAlias: + pos := r.pos() + typ := r.typ() + return types2.NewTypeName(pos, objPkg, objName, typ) + + case pkgbits.ObjConst: + pos := r.pos() + typ := r.typ() + val := r.Value() + return types2.NewConst(pos, objPkg, objName, typ, val) + + case pkgbits.ObjFunc: + pos := r.pos() + tparams := r.typeParamNames() + sig := r.signature(nil, nil, tparams) + return types2.NewFunc(pos, objPkg, objName, sig) + + case pkgbits.ObjType: + pos := r.pos() + + return types2.NewTypeNameLazy(pos, objPkg, objName, func(named *types2.Named) (tparams []*types2.TypeParam, underlying types2.Type, methods []*types2.Func) { + tparams = r.typeParamNames() + + // TODO(mdempsky): Rewrite receiver types to underlying is an + // Interface? The go/types importer does this (I think because + // unit tests expected that), but cmd/compile doesn't care + // about it, so maybe we can avoid worrying about that here. 
+ underlying = r.typ().Underlying()
+
+ methods = make([]*types2.Func, r.Len())
+ for i := range methods {
+ methods[i] = r.method()
+ }
+
+ return
+ })
+
+ case pkgbits.ObjVar:
+ pos := r.pos()
+ typ := r.typ()
+ return types2.NewVar(pos, objPkg, objName, typ)
+ }
+ })
+
+ return objPkg, objName
+}
+
+func (pr *pkgReader) objDictIdx(idx pkgbits.Index) *readerDict {
+ var dict readerDict
+ {
+ r := pr.tempReader(pkgbits.RelocObjDict, idx, pkgbits.SyncObject1)
+
+ if implicits := r.Len(); implicits != 0 {
+ base.Fatalf("unexpected object with %v implicit type parameter(s)", implicits)
+ }
+
+ dict.bounds = make([]typeInfo, r.Len())
+ for i := range dict.bounds {
+ dict.bounds[i] = r.typInfo()
+ }
+
+ dict.derived = make([]derivedInfo, r.Len())
+ dict.derivedTypes = make([]types2.Type, len(dict.derived))
+ for i := range dict.derived {
+ dict.derived[i] = derivedInfo{r.Reloc(pkgbits.RelocType), r.Bool()}
+ }
+
+ pr.retireReader(r)
+ }
+ // function references follow, but the reader doesn't need those
+
+ return &dict
+}
+
+func (r *reader) typeParamNames() []*types2.TypeParam {
+ r.Sync(pkgbits.SyncTypeParamNames)
+
+ // Note: This code assumes it only processes objects without
+ // implicit type parameters. This is currently fine, because
+ // reader is only used to read in exported declarations, which are
+ // always package scoped.
+
+ if len(r.dict.bounds) == 0 {
+ return nil
+ }
+
+ // Careful: Type parameter lists may have cycles. To allow for this,
+ // we construct the type parameter list in two passes: first we
+ // create all the TypeNames and TypeParams, then we construct and
+ // set the bound type.
+
+ r.dict.tparams = make([]*types2.TypeParam, len(r.dict.bounds))
+ for i := range r.dict.bounds {
+ pos := r.pos()
+ pkg, name := r.localIdent()
+
+ tname := types2.NewTypeName(pos, pkg, name, nil)
+ r.dict.tparams[i] = types2.NewTypeParam(tname, nil)
+ }
+
+ for i, bound := range r.dict.bounds {
+ r.dict.tparams[i].SetConstraint(r.p.typIdx(bound, r.dict))
+ }
+
+ return r.dict.tparams
+}
+
+func (r *reader) method() *types2.Func {
+ r.Sync(pkgbits.SyncMethod)
+ pos := r.pos()
+ pkg, name := r.selector()
+
+ rtparams := r.typeParamNames()
+ sig := r.signature(r.param(), rtparams, nil)
+
+ _ = r.pos() // TODO(mdempsky): Remove; this is a hack for linker.go.
+ return types2.NewFunc(pos, pkg, name, sig)
+}
+
+func (r *reader) qualifiedIdent() (*types2.Package, string) { return r.ident(pkgbits.SyncSym) }
+func (r *reader) localIdent() (*types2.Package, string) { return r.ident(pkgbits.SyncLocalIdent) }
+func (r *reader) selector() (*types2.Package, string) { return r.ident(pkgbits.SyncSelector) }
+
+func (r *reader) ident(marker pkgbits.SyncMarker) (*types2.Package, string) {
+ r.Sync(marker)
+ return r.pkg(), r.String()
+}
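For orientation, ReadPackage at the top of ureader.go is the entry point the importer drives once FindExportData has positioned the reader and the unified export payload has been read into memory. The sketch below is a hypothetical helper, not the actual Import code path: the name loadUnified is invented, and it only builds inside the compiler tree because pkgbits and types2 are internal packages.

package importer

import (
	"cmd/compile/internal/types2"
	"internal/pkgbits"
)

// loadUnified sketches how the unified reader might be driven for one package.
func loadUnified(pkgPath, exportData string, imports map[string]*types2.Package) *types2.Package {
	// Wrap the raw unified export payload for the given package path.
	input := pkgbits.NewPkgDecoder(pkgPath, exportData)

	// ReadPackage threads the Context through to types2.Instantiate,
	// so a shared Context de-duplicates identical instantiations.
	return ReadPackage(types2.NewContext(), imports, input)
}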