author     Daniel Baumann <daniel.baumann@progress-linux.org>   2024-04-28 13:14:23 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>   2024-04-28 13:14:23 +0000
commit     73df946d56c74384511a194dd01dbe099584fd1a
tree       fd0bcea490dd81327ddfbb31e215439672c9a068  /src/cmd/cover
parent     Initial commit.
Adding upstream version 1.16.10.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'src/cmd/cover')
-rw-r--r--  src/cmd/cover/cover.go                     729
-rw-r--r--  src/cmd/cover/cover_test.go                573
-rw-r--r--  src/cmd/cover/doc.go                        25
-rw-r--r--  src/cmd/cover/func.go                      246
-rw-r--r--  src/cmd/cover/html.go                      305
-rw-r--r--  src/cmd/cover/pkgname_test.go               31
-rw-r--r--  src/cmd/cover/profile.go                   220
-rw-r--r--  src/cmd/cover/testdata/directives.go        40
-rw-r--r--  src/cmd/cover/testdata/html/html.go         30
-rw-r--r--  src/cmd/cover/testdata/html/html.golden     18
-rw-r--r--  src/cmd/cover/testdata/html/html_test.go     8
-rw-r--r--  src/cmd/cover/testdata/main.go             116
-rw-r--r--  src/cmd/cover/testdata/p.go                 27
-rw-r--r--  src/cmd/cover/testdata/profile.cov           5
-rw-r--r--  src/cmd/cover/testdata/test.go             299
-rw-r--r--  src/cmd/cover/testdata/toolexec.go          33
16 files changed, 2705 insertions, 0 deletions
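What follows is the raw diff adding cmd/cover. For orientation before reading it: in -mode=set the tool inserts an assignment into a generated counter struct at the start of every basic block and appends the struct declaration to the end of the file (see setCounterStmt, newCounter, and addVariables in cover.go below). A rough, hand-written sketch of the shape of that annotated output for a tiny file — the example file name, counter placement, and struct contents are illustrative only, not the tool's exact output:

    //line example.go:1
    package example

    // Greet is a hypothetical input function; GoCover is the default -var name.
    func Greet(loud bool) string {
        GoCover.Count[0] = 1 // block containing the if statement
        if loud {
            GoCover.Count[1] = 1 // body of the if
            return "HELLO"
        }
        GoCover.Count[2] = 1 // statements following the if
        return "hello"
    }

    // Appended by addVariables; Pos and NumStmt are filled with the recorded
    // block positions (line/column pairs) and per-block statement counts.
    var GoCover = struct {
        Count   [3]uint32
        Pos     [3 * 3]uint32
        NumStmt [3]uint16
    }{}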
diff --git a/src/cmd/cover/cover.go b/src/cmd/cover/cover.go new file mode 100644 index 0000000..7ee0008 --- /dev/null +++ b/src/cmd/cover/cover.go @@ -0,0 +1,729 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package main + +import ( + "bytes" + "flag" + "fmt" + "go/ast" + "go/parser" + "go/token" + "io" + "log" + "os" + "sort" + + "cmd/internal/edit" + "cmd/internal/objabi" +) + +const usageMessage = "" + + `Usage of 'go tool cover': +Given a coverage profile produced by 'go test': + go test -coverprofile=c.out + +Open a web browser displaying annotated source code: + go tool cover -html=c.out + +Write out an HTML file instead of launching a web browser: + go tool cover -html=c.out -o coverage.html + +Display coverage percentages to stdout for each function: + go tool cover -func=c.out + +Finally, to generate modified source code with coverage annotations +(what go test -cover does): + go tool cover -mode=set -var=CoverageVariableName program.go +` + +func usage() { + fmt.Fprintln(os.Stderr, usageMessage) + fmt.Fprintln(os.Stderr, "Flags:") + flag.PrintDefaults() + fmt.Fprintln(os.Stderr, "\n Only one of -html, -func, or -mode may be set.") + os.Exit(2) +} + +var ( + mode = flag.String("mode", "", "coverage mode: set, count, atomic") + varVar = flag.String("var", "GoCover", "name of coverage variable to generate") + output = flag.String("o", "", "file for output; default: stdout") + htmlOut = flag.String("html", "", "generate HTML representation of coverage profile") + funcOut = flag.String("func", "", "output coverage profile information for each function") +) + +var profile string // The profile to read; the value of -html or -func + +var counterStmt func(*File, string) string + +const ( + atomicPackagePath = "sync/atomic" + atomicPackageName = "_cover_atomic_" +) + +func main() { + objabi.AddVersionFlag() + flag.Usage = usage + flag.Parse() + + // Usage information when no arguments. + if flag.NFlag() == 0 && flag.NArg() == 0 { + flag.Usage() + } + + err := parseFlags() + if err != nil { + fmt.Fprintln(os.Stderr, err) + fmt.Fprintln(os.Stderr, `For usage information, run "go tool cover -help"`) + os.Exit(2) + } + + // Generate coverage-annotated source. + if *mode != "" { + annotate(flag.Arg(0)) + return + } + + // Output HTML or function coverage information. + if *htmlOut != "" { + err = htmlOutput(profile, *output) + } else { + err = funcOutput(profile, *output) + } + + if err != nil { + fmt.Fprintf(os.Stderr, "cover: %v\n", err) + os.Exit(2) + } +} + +// parseFlags sets the profile and counterStmt globals and performs validations. +func parseFlags() error { + profile = *htmlOut + if *funcOut != "" { + if profile != "" { + return fmt.Errorf("too many options") + } + profile = *funcOut + } + + // Must either display a profile or rewrite Go source. 
+ if (profile == "") == (*mode == "") { + return fmt.Errorf("too many options") + } + + if *varVar != "" && !token.IsIdentifier(*varVar) { + return fmt.Errorf("-var: %q is not a valid identifier", *varVar) + } + + if *mode != "" { + switch *mode { + case "set": + counterStmt = setCounterStmt + case "count": + counterStmt = incCounterStmt + case "atomic": + counterStmt = atomicCounterStmt + default: + return fmt.Errorf("unknown -mode %v", *mode) + } + + if flag.NArg() == 0 { + return fmt.Errorf("missing source file") + } else if flag.NArg() == 1 { + return nil + } + } else if flag.NArg() == 0 { + return nil + } + return fmt.Errorf("too many arguments") +} + +// Block represents the information about a basic block to be recorded in the analysis. +// Note: Our definition of basic block is based on control structures; we don't break +// apart && and ||. We could but it doesn't seem important enough to bother. +type Block struct { + startByte token.Pos + endByte token.Pos + numStmt int +} + +// File is a wrapper for the state of a file used in the parser. +// The basic parse tree walker is a method of this type. +type File struct { + fset *token.FileSet + name string // Name of file. + astFile *ast.File + blocks []Block + content []byte + edit *edit.Buffer +} + +// findText finds text in the original source, starting at pos. +// It correctly skips over comments and assumes it need not +// handle quoted strings. +// It returns a byte offset within f.src. +func (f *File) findText(pos token.Pos, text string) int { + b := []byte(text) + start := f.offset(pos) + i := start + s := f.content + for i < len(s) { + if bytes.HasPrefix(s[i:], b) { + return i + } + if i+2 <= len(s) && s[i] == '/' && s[i+1] == '/' { + for i < len(s) && s[i] != '\n' { + i++ + } + continue + } + if i+2 <= len(s) && s[i] == '/' && s[i+1] == '*' { + for i += 2; ; i++ { + if i+2 > len(s) { + return 0 + } + if s[i] == '*' && s[i+1] == '/' { + i += 2 + break + } + } + continue + } + i++ + } + return -1 +} + +// Visit implements the ast.Visitor interface. +func (f *File) Visit(node ast.Node) ast.Visitor { + switch n := node.(type) { + case *ast.BlockStmt: + // If it's a switch or select, the body is a list of case clauses; don't tag the block itself. + if len(n.List) > 0 { + switch n.List[0].(type) { + case *ast.CaseClause: // switch + for _, n := range n.List { + clause := n.(*ast.CaseClause) + f.addCounters(clause.Colon+1, clause.Colon+1, clause.End(), clause.Body, false) + } + return f + case *ast.CommClause: // select + for _, n := range n.List { + clause := n.(*ast.CommClause) + f.addCounters(clause.Colon+1, clause.Colon+1, clause.End(), clause.Body, false) + } + return f + } + } + f.addCounters(n.Lbrace, n.Lbrace+1, n.Rbrace+1, n.List, true) // +1 to step past closing brace. + case *ast.IfStmt: + if n.Init != nil { + ast.Walk(f, n.Init) + } + ast.Walk(f, n.Cond) + ast.Walk(f, n.Body) + if n.Else == nil { + return nil + } + // The elses are special, because if we have + // if x { + // } else if y { + // } + // we want to cover the "if y". To do this, we need a place to drop the counter, + // so we add a hidden block: + // if x { + // } else { + // if y { + // } + // } + elseOffset := f.findText(n.Body.End(), "else") + if elseOffset < 0 { + panic("lost else") + } + f.edit.Insert(elseOffset+4, "{") + f.edit.Insert(f.offset(n.Else.End()), "}") + + // We just created a block, now walk it. + // Adjust the position of the new block to start after + // the "else". That will cause it to follow the "{" + // we inserted above. 
+ pos := f.fset.File(n.Body.End()).Pos(elseOffset + 4) + switch stmt := n.Else.(type) { + case *ast.IfStmt: + block := &ast.BlockStmt{ + Lbrace: pos, + List: []ast.Stmt{stmt}, + Rbrace: stmt.End(), + } + n.Else = block + case *ast.BlockStmt: + stmt.Lbrace = pos + default: + panic("unexpected node type in if") + } + ast.Walk(f, n.Else) + return nil + case *ast.SelectStmt: + // Don't annotate an empty select - creates a syntax error. + if n.Body == nil || len(n.Body.List) == 0 { + return nil + } + case *ast.SwitchStmt: + // Don't annotate an empty switch - creates a syntax error. + if n.Body == nil || len(n.Body.List) == 0 { + if n.Init != nil { + ast.Walk(f, n.Init) + } + if n.Tag != nil { + ast.Walk(f, n.Tag) + } + return nil + } + case *ast.TypeSwitchStmt: + // Don't annotate an empty type switch - creates a syntax error. + if n.Body == nil || len(n.Body.List) == 0 { + if n.Init != nil { + ast.Walk(f, n.Init) + } + ast.Walk(f, n.Assign) + return nil + } + case *ast.FuncDecl: + // Don't annotate functions with blank names - they cannot be executed. + if n.Name.Name == "_" { + return nil + } + } + return f +} + +func annotate(name string) { + fset := token.NewFileSet() + content, err := os.ReadFile(name) + if err != nil { + log.Fatalf("cover: %s: %s", name, err) + } + parsedFile, err := parser.ParseFile(fset, name, content, parser.ParseComments) + if err != nil { + log.Fatalf("cover: %s: %s", name, err) + } + + file := &File{ + fset: fset, + name: name, + content: content, + edit: edit.NewBuffer(content), + astFile: parsedFile, + } + if *mode == "atomic" { + // Add import of sync/atomic immediately after package clause. + // We do this even if there is an existing import, because the + // existing import may be shadowed at any given place we want + // to refer to it, and our name (_cover_atomic_) is less likely to + // be shadowed. + file.edit.Insert(file.offset(file.astFile.Name.End()), + fmt.Sprintf("; import %s %q", atomicPackageName, atomicPackagePath)) + } + + ast.Walk(file, file.astFile) + newContent := file.edit.Bytes() + + fd := os.Stdout + if *output != "" { + var err error + fd, err = os.Create(*output) + if err != nil { + log.Fatalf("cover: %s", err) + } + } + + fmt.Fprintf(fd, "//line %s:1\n", name) + fd.Write(newContent) + + // After printing the source tree, add some declarations for the counters etc. + // We could do this by adding to the tree, but it's easier just to print the text. + file.addVariables(fd) +} + +// setCounterStmt returns the expression: __count[23] = 1. +func setCounterStmt(f *File, counter string) string { + return fmt.Sprintf("%s = 1", counter) +} + +// incCounterStmt returns the expression: __count[23]++. +func incCounterStmt(f *File, counter string) string { + return fmt.Sprintf("%s++", counter) +} + +// atomicCounterStmt returns the expression: atomic.AddUint32(&__count[23], 1) +func atomicCounterStmt(f *File, counter string) string { + return fmt.Sprintf("%s.AddUint32(&%s, 1)", atomicPackageName, counter) +} + +// newCounter creates a new counter expression of the appropriate form. +func (f *File) newCounter(start, end token.Pos, numStmt int) string { + stmt := counterStmt(f, fmt.Sprintf("%s.Count[%d]", *varVar, len(f.blocks))) + f.blocks = append(f.blocks, Block{start, end, numStmt}) + return stmt +} + +// addCounters takes a list of statements and adds counters to the beginning of +// each basic block at the top level of that list. For instance, given +// +// S1 +// if cond { +// S2 +// } +// S3 +// +// counters will be added before S1 and before S3. 
The block containing S2 +// will be visited in a separate call. +// TODO: Nested simple blocks get unnecessary (but correct) counters +func (f *File) addCounters(pos, insertPos, blockEnd token.Pos, list []ast.Stmt, extendToClosingBrace bool) { + // Special case: make sure we add a counter to an empty block. Can't do this below + // or we will add a counter to an empty statement list after, say, a return statement. + if len(list) == 0 { + f.edit.Insert(f.offset(insertPos), f.newCounter(insertPos, blockEnd, 0)+";") + return + } + // Make a copy of the list, as we may mutate it and should leave the + // existing list intact. + list = append([]ast.Stmt(nil), list...) + // We have a block (statement list), but it may have several basic blocks due to the + // appearance of statements that affect the flow of control. + for { + // Find first statement that affects flow of control (break, continue, if, etc.). + // It will be the last statement of this basic block. + var last int + end := blockEnd + for last = 0; last < len(list); last++ { + stmt := list[last] + end = f.statementBoundary(stmt) + if f.endsBasicSourceBlock(stmt) { + // If it is a labeled statement, we need to place a counter between + // the label and its statement because it may be the target of a goto + // and thus start a basic block. That is, given + // foo: stmt + // we need to create + // foo: ; stmt + // and mark the label as a block-terminating statement. + // The result will then be + // foo: COUNTER[n]++; stmt + // However, we can't do this if the labeled statement is already + // a control statement, such as a labeled for. + if label, isLabel := stmt.(*ast.LabeledStmt); isLabel && !f.isControl(label.Stmt) { + newLabel := *label + newLabel.Stmt = &ast.EmptyStmt{ + Semicolon: label.Stmt.Pos(), + Implicit: true, + } + end = label.Pos() // Previous block ends before the label. + list[last] = &newLabel + // Open a gap and drop in the old statement, now without a label. + list = append(list, nil) + copy(list[last+1:], list[last:]) + list[last+1] = label.Stmt + } + last++ + extendToClosingBrace = false // Block is broken up now. + break + } + } + if extendToClosingBrace { + end = blockEnd + } + if pos != end { // Can have no source to cover if e.g. blocks abut. + f.edit.Insert(f.offset(insertPos), f.newCounter(pos, end, last)+";") + } + list = list[last:] + if len(list) == 0 { + break + } + pos = list[0].Pos() + insertPos = pos + } +} + +// hasFuncLiteral reports the existence and position of the first func literal +// in the node, if any. If a func literal appears, it usually marks the termination +// of a basic block because the function body is itself a block. +// Therefore we draw a line at the start of the body of the first function literal we find. +// TODO: what if there's more than one? Probably doesn't matter much. +func hasFuncLiteral(n ast.Node) (bool, token.Pos) { + if n == nil { + return false, 0 + } + var literal funcLitFinder + ast.Walk(&literal, n) + return literal.found(), token.Pos(literal) +} + +// statementBoundary finds the location in s that terminates the current basic +// block in the source. +func (f *File) statementBoundary(s ast.Stmt) token.Pos { + // Control flow statements are easy. + switch s := s.(type) { + case *ast.BlockStmt: + // Treat blocks like basic blocks to avoid overlapping counters. 
+ return s.Lbrace + case *ast.IfStmt: + found, pos := hasFuncLiteral(s.Init) + if found { + return pos + } + found, pos = hasFuncLiteral(s.Cond) + if found { + return pos + } + return s.Body.Lbrace + case *ast.ForStmt: + found, pos := hasFuncLiteral(s.Init) + if found { + return pos + } + found, pos = hasFuncLiteral(s.Cond) + if found { + return pos + } + found, pos = hasFuncLiteral(s.Post) + if found { + return pos + } + return s.Body.Lbrace + case *ast.LabeledStmt: + return f.statementBoundary(s.Stmt) + case *ast.RangeStmt: + found, pos := hasFuncLiteral(s.X) + if found { + return pos + } + return s.Body.Lbrace + case *ast.SwitchStmt: + found, pos := hasFuncLiteral(s.Init) + if found { + return pos + } + found, pos = hasFuncLiteral(s.Tag) + if found { + return pos + } + return s.Body.Lbrace + case *ast.SelectStmt: + return s.Body.Lbrace + case *ast.TypeSwitchStmt: + found, pos := hasFuncLiteral(s.Init) + if found { + return pos + } + return s.Body.Lbrace + } + // If not a control flow statement, it is a declaration, expression, call, etc. and it may have a function literal. + // If it does, that's tricky because we want to exclude the body of the function from this block. + // Draw a line at the start of the body of the first function literal we find. + // TODO: what if there's more than one? Probably doesn't matter much. + found, pos := hasFuncLiteral(s) + if found { + return pos + } + return s.End() +} + +// endsBasicSourceBlock reports whether s changes the flow of control: break, if, etc., +// or if it's just problematic, for instance contains a function literal, which will complicate +// accounting due to the block-within-an expression. +func (f *File) endsBasicSourceBlock(s ast.Stmt) bool { + switch s := s.(type) { + case *ast.BlockStmt: + // Treat blocks like basic blocks to avoid overlapping counters. + return true + case *ast.BranchStmt: + return true + case *ast.ForStmt: + return true + case *ast.IfStmt: + return true + case *ast.LabeledStmt: + return true // A goto may branch here, starting a new basic block. + case *ast.RangeStmt: + return true + case *ast.SwitchStmt: + return true + case *ast.SelectStmt: + return true + case *ast.TypeSwitchStmt: + return true + case *ast.ExprStmt: + // Calls to panic change the flow. + // We really should verify that "panic" is the predefined function, + // but without type checking we can't and the likelihood of it being + // an actual problem is vanishingly small. + if call, ok := s.X.(*ast.CallExpr); ok { + if ident, ok := call.Fun.(*ast.Ident); ok && ident.Name == "panic" && len(call.Args) == 1 { + return true + } + } + } + found, _ := hasFuncLiteral(s) + return found +} + +// isControl reports whether s is a control statement that, if labeled, cannot be +// separated from its label. +func (f *File) isControl(s ast.Stmt) bool { + switch s.(type) { + case *ast.ForStmt, *ast.RangeStmt, *ast.SwitchStmt, *ast.SelectStmt, *ast.TypeSwitchStmt: + return true + } + return false +} + +// funcLitFinder implements the ast.Visitor pattern to find the location of any +// function literal in a subtree. +type funcLitFinder token.Pos + +func (f *funcLitFinder) Visit(node ast.Node) (w ast.Visitor) { + if f.found() { + return nil // Prune search. + } + switch n := node.(type) { + case *ast.FuncLit: + *f = funcLitFinder(n.Body.Lbrace) + return nil // Prune search. + } + return f +} + +func (f *funcLitFinder) found() bool { + return token.Pos(*f) != token.NoPos +} + +// Sort interface for []block1; used for self-check in addVariables. 
+ +type block1 struct { + Block + index int +} + +type blockSlice []block1 + +func (b blockSlice) Len() int { return len(b) } +func (b blockSlice) Less(i, j int) bool { return b[i].startByte < b[j].startByte } +func (b blockSlice) Swap(i, j int) { b[i], b[j] = b[j], b[i] } + +// offset translates a token position into a 0-indexed byte offset. +func (f *File) offset(pos token.Pos) int { + return f.fset.Position(pos).Offset +} + +// addVariables adds to the end of the file the declarations to set up the counter and position variables. +func (f *File) addVariables(w io.Writer) { + // Self-check: Verify that the instrumented basic blocks are disjoint. + t := make([]block1, len(f.blocks)) + for i := range f.blocks { + t[i].Block = f.blocks[i] + t[i].index = i + } + sort.Sort(blockSlice(t)) + for i := 1; i < len(t); i++ { + if t[i-1].endByte > t[i].startByte { + fmt.Fprintf(os.Stderr, "cover: internal error: block %d overlaps block %d\n", t[i-1].index, t[i].index) + // Note: error message is in byte positions, not token positions. + fmt.Fprintf(os.Stderr, "\t%s:#%d,#%d %s:#%d,#%d\n", + f.name, f.offset(t[i-1].startByte), f.offset(t[i-1].endByte), + f.name, f.offset(t[i].startByte), f.offset(t[i].endByte)) + } + } + + // Declare the coverage struct as a package-level variable. + fmt.Fprintf(w, "\nvar %s = struct {\n", *varVar) + fmt.Fprintf(w, "\tCount [%d]uint32\n", len(f.blocks)) + fmt.Fprintf(w, "\tPos [3 * %d]uint32\n", len(f.blocks)) + fmt.Fprintf(w, "\tNumStmt [%d]uint16\n", len(f.blocks)) + fmt.Fprintf(w, "} {\n") + + // Initialize the position array field. + fmt.Fprintf(w, "\tPos: [3 * %d]uint32{\n", len(f.blocks)) + + // A nice long list of positions. Each position is encoded as follows to reduce size: + // - 32-bit starting line number + // - 32-bit ending line number + // - (16 bit ending column number << 16) | (16-bit starting column number). + for i, block := range f.blocks { + start := f.fset.Position(block.startByte) + end := f.fset.Position(block.endByte) + + start, end = dedup(start, end) + + fmt.Fprintf(w, "\t\t%d, %d, %#x, // [%d]\n", start.Line, end.Line, (end.Column&0xFFFF)<<16|(start.Column&0xFFFF), i) + } + + // Close the position array. + fmt.Fprintf(w, "\t},\n") + + // Initialize the position array field. + fmt.Fprintf(w, "\tNumStmt: [%d]uint16{\n", len(f.blocks)) + + // A nice long list of statements-per-block, so we can give a conventional + // valuation of "percent covered". To save space, it's a 16-bit number, so we + // clamp it if it overflows - won't matter in practice. + for i, block := range f.blocks { + n := block.numStmt + if n > 1<<16-1 { + n = 1<<16 - 1 + } + fmt.Fprintf(w, "\t\t%d, // %d\n", n, i) + } + + // Close the statements-per-block array. + fmt.Fprintf(w, "\t},\n") + + // Close the struct initialization. + fmt.Fprintf(w, "}\n") + + // Emit a reference to the atomic package to avoid + // import and not used error when there's no code in a file. + if *mode == "atomic" { + fmt.Fprintf(w, "var _ = %s.LoadUint32\n", atomicPackageName) + } +} + +// It is possible for positions to repeat when there is a line +// directive that does not specify column information and the input +// has not been passed through gofmt. +// See issues #27530 and #30746. +// Tests are TestHtmlUnformatted and TestLineDup. +// We use a map to avoid duplicates. + +// pos2 is a pair of token.Position values, used as a map key type. +type pos2 struct { + p1, p2 token.Position +} + +// seenPos2 tracks whether we have seen a token.Position pair. 
+var seenPos2 = make(map[pos2]bool) + +// dedup takes a token.Position pair and returns a pair that does not +// duplicate any existing pair. The returned pair will have the Offset +// fields cleared. +func dedup(p1, p2 token.Position) (r1, r2 token.Position) { + key := pos2{ + p1: p1, + p2: p2, + } + + // We want to ignore the Offset fields in the map, + // since cover uses only file/line/column. + key.p1.Offset = 0 + key.p2.Offset = 0 + + for seenPos2[key] { + key.p2.Column++ + } + seenPos2[key] = true + + return key.p1, key.p2 +} diff --git a/src/cmd/cover/cover_test.go b/src/cmd/cover/cover_test.go new file mode 100644 index 0000000..86c95d1 --- /dev/null +++ b/src/cmd/cover/cover_test.go @@ -0,0 +1,573 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package main_test + +import ( + "bufio" + "bytes" + "flag" + "fmt" + "go/ast" + "go/parser" + "go/token" + "internal/testenv" + "os" + "os/exec" + "path/filepath" + "regexp" + "strings" + "sync" + "testing" +) + +const ( + // Data directory, also the package directory for the test. + testdata = "testdata" +) + +var ( + // Input files. + testMain = filepath.Join(testdata, "main.go") + testTest = filepath.Join(testdata, "test.go") + coverProfile = filepath.Join(testdata, "profile.cov") + toolexecSource = filepath.Join(testdata, "toolexec.go") + + // The HTML test files are in a separate directory + // so they are a complete package. + htmlGolden = filepath.Join(testdata, "html", "html.golden") + + // Temporary files. + tmpTestMain string + coverInput string + coverOutput string + htmlProfile string + htmlHTML string + htmlUDir string + htmlU string + htmlUTest string + htmlUProfile string + htmlUHTML string + lineDupDir string + lineDupGo string + lineDupTestGo string + lineDupProfile string +) + +var ( + // testTempDir is a temporary directory created in TestMain. + testTempDir string + + // testcover is a newly built version of the cover program. + testcover string + + // toolexec is a program to use as the go tool's -toolexec argument. + toolexec string + + // testcoverErr records an error building testcover or toolexec. + testcoverErr error + + // testcoverOnce is used to build testcover once. + testcoverOnce sync.Once + + // toolexecArg is the argument to pass to the go tool. + toolexecArg string +) + +var debug = flag.Bool("debug", false, "keep rewritten files for debugging") + +// We use TestMain to set up a temporary directory and remove it when +// the tests are done. 
+func TestMain(m *testing.M) { + dir, err := os.MkdirTemp("", "go-testcover") + if err != nil { + fmt.Fprintln(os.Stderr, err) + os.Exit(1) + } + os.Setenv("GOPATH", filepath.Join(dir, "_gopath")) + + testTempDir = dir + + tmpTestMain = filepath.Join(dir, "main.go") + coverInput = filepath.Join(dir, "test_line.go") + coverOutput = filepath.Join(dir, "test_cover.go") + htmlProfile = filepath.Join(dir, "html.cov") + htmlHTML = filepath.Join(dir, "html.html") + htmlUDir = filepath.Join(dir, "htmlunformatted") + htmlU = filepath.Join(htmlUDir, "htmlunformatted.go") + htmlUTest = filepath.Join(htmlUDir, "htmlunformatted_test.go") + htmlUProfile = filepath.Join(htmlUDir, "htmlunformatted.cov") + htmlUHTML = filepath.Join(htmlUDir, "htmlunformatted.html") + lineDupDir = filepath.Join(dir, "linedup") + lineDupGo = filepath.Join(lineDupDir, "linedup.go") + lineDupTestGo = filepath.Join(lineDupDir, "linedup_test.go") + lineDupProfile = filepath.Join(lineDupDir, "linedup.out") + + status := m.Run() + + if !*debug { + os.RemoveAll(dir) + } + + os.Exit(status) +} + +// buildCover builds a version of the cover program for testing. +// This ensures that "go test cmd/cover" tests the current cmd/cover. +func buildCover(t *testing.T) { + t.Helper() + testenv.MustHaveGoBuild(t) + testcoverOnce.Do(func() { + var wg sync.WaitGroup + wg.Add(2) + + var err1, err2 error + go func() { + defer wg.Done() + testcover = filepath.Join(testTempDir, "cover.exe") + t.Logf("running [go build -o %s]", testcover) + out, err := exec.Command(testenv.GoToolPath(t), "build", "-o", testcover).CombinedOutput() + if len(out) > 0 { + t.Logf("%s", out) + } + err1 = err + }() + + go func() { + defer wg.Done() + toolexec = filepath.Join(testTempDir, "toolexec.exe") + t.Logf("running [go -build -o %s %s]", toolexec, toolexecSource) + out, err := exec.Command(testenv.GoToolPath(t), "build", "-o", toolexec, toolexecSource).CombinedOutput() + if len(out) > 0 { + t.Logf("%s", out) + } + err2 = err + }() + + wg.Wait() + + testcoverErr = err1 + if err2 != nil && err1 == nil { + testcoverErr = err2 + } + + toolexecArg = "-toolexec=" + toolexec + " " + testcover + }) + if testcoverErr != nil { + t.Fatal("failed to build testcover or toolexec program:", testcoverErr) + } +} + +// Run this shell script, but do it in Go so it can be run by "go test". +// +// replace the word LINE with the line number < testdata/test.go > testdata/test_line.go +// go build -o testcover +// testcover -mode=count -var=CoverTest -o ./testdata/test_cover.go testdata/test_line.go +// go run ./testdata/main.go ./testdata/test.go +// +func TestCover(t *testing.T) { + t.Parallel() + testenv.MustHaveGoRun(t) + buildCover(t) + + // Read in the test file (testTest) and write it, with LINEs specified, to coverInput. + file, err := os.ReadFile(testTest) + if err != nil { + t.Fatal(err) + } + lines := bytes.Split(file, []byte("\n")) + for i, line := range lines { + lines[i] = bytes.ReplaceAll(line, []byte("LINE"), []byte(fmt.Sprint(i+1))) + } + + // Add a function that is not gofmt'ed. This used to cause a crash. + // We don't put it in test.go because then we would have to gofmt it. + // Issue 23927. 
+ lines = append(lines, []byte("func unFormatted() {"), + []byte("\tif true {"), + []byte("\t}else{"), + []byte("\t}"), + []byte("}")) + lines = append(lines, []byte("func unFormatted2(b bool) {if b{}else{}}")) + + if err := os.WriteFile(coverInput, bytes.Join(lines, []byte("\n")), 0666); err != nil { + t.Fatal(err) + } + + // testcover -mode=count -var=thisNameMustBeVeryLongToCauseOverflowOfCounterIncrementStatementOntoNextLineForTest -o ./testdata/test_cover.go testdata/test_line.go + cmd := exec.Command(testcover, "-mode=count", "-var=thisNameMustBeVeryLongToCauseOverflowOfCounterIncrementStatementOntoNextLineForTest", "-o", coverOutput, coverInput) + run(cmd, t) + + cmd = exec.Command(testcover, "-mode=set", "-var=Not_an-identifier", "-o", coverOutput, coverInput) + err = cmd.Run() + if err == nil { + t.Error("Expected cover to fail with an error") + } + + // Copy testmain to testTempDir, so that it is in the same directory + // as coverOutput. + b, err := os.ReadFile(testMain) + if err != nil { + t.Fatal(err) + } + if err := os.WriteFile(tmpTestMain, b, 0444); err != nil { + t.Fatal(err) + } + + // go run ./testdata/main.go ./testdata/test.go + cmd = exec.Command(testenv.GoToolPath(t), "run", tmpTestMain, coverOutput) + run(cmd, t) + + file, err = os.ReadFile(coverOutput) + if err != nil { + t.Fatal(err) + } + // compiler directive must appear right next to function declaration. + if got, err := regexp.MatchString(".*\n//go:nosplit\nfunc someFunction().*", string(file)); err != nil || !got { + t.Error("misplaced compiler directive") + } + // "go:linkname" compiler directive should be present. + if got, err := regexp.MatchString(`.*go\:linkname some\_name some\_name.*`, string(file)); err != nil || !got { + t.Error("'go:linkname' compiler directive not found") + } + + // Other comments should be preserved too. + c := ".*// This comment didn't appear in generated go code.*" + if got, err := regexp.MatchString(c, string(file)); err != nil || !got { + t.Errorf("non compiler directive comment %q not found", c) + } +} + +// TestDirectives checks that compiler directives are preserved and positioned +// correctly. Directives that occur before top-level declarations should remain +// above those declarations, even if they are not part of the block of +// documentation comments. +func TestDirectives(t *testing.T) { + t.Parallel() + buildCover(t) + + // Read the source file and find all the directives. We'll keep + // track of whether each one has been seen in the output. + testDirectives := filepath.Join(testdata, "directives.go") + source, err := os.ReadFile(testDirectives) + if err != nil { + t.Fatal(err) + } + sourceDirectives := findDirectives(source) + + // testcover -mode=atomic ./testdata/directives.go + cmd := exec.Command(testcover, "-mode=atomic", testDirectives) + cmd.Stderr = os.Stderr + output, err := cmd.Output() + if err != nil { + t.Fatal(err) + } + + // Check that all directives are present in the output. 
+ outputDirectives := findDirectives(output) + foundDirective := make(map[string]bool) + for _, p := range sourceDirectives { + foundDirective[p.name] = false + } + for _, p := range outputDirectives { + if found, ok := foundDirective[p.name]; !ok { + t.Errorf("unexpected directive in output: %s", p.text) + } else if found { + t.Errorf("directive found multiple times in output: %s", p.text) + } + foundDirective[p.name] = true + } + for name, found := range foundDirective { + if !found { + t.Errorf("missing directive: %s", name) + } + } + + // Check that directives that start with the name of top-level declarations + // come before the beginning of the named declaration and after the end + // of the previous declaration. + fset := token.NewFileSet() + astFile, err := parser.ParseFile(fset, testDirectives, output, 0) + if err != nil { + t.Fatal(err) + } + + prevEnd := 0 + for _, decl := range astFile.Decls { + var name string + switch d := decl.(type) { + case *ast.FuncDecl: + name = d.Name.Name + case *ast.GenDecl: + if len(d.Specs) == 0 { + // An empty group declaration. We still want to check that + // directives can be associated with it, so we make up a name + // to match directives in the test data. + name = "_empty" + } else if spec, ok := d.Specs[0].(*ast.TypeSpec); ok { + name = spec.Name.Name + } + } + pos := fset.Position(decl.Pos()).Offset + end := fset.Position(decl.End()).Offset + if name == "" { + prevEnd = end + continue + } + for _, p := range outputDirectives { + if !strings.HasPrefix(p.name, name) { + continue + } + if p.offset < prevEnd || pos < p.offset { + t.Errorf("directive %s does not appear before definition %s", p.text, name) + } + } + prevEnd = end + } +} + +type directiveInfo struct { + text string // full text of the comment, not including newline + name string // text after //go: + offset int // byte offset of first slash in comment +} + +func findDirectives(source []byte) []directiveInfo { + var directives []directiveInfo + directivePrefix := []byte("\n//go:") + offset := 0 + for { + i := bytes.Index(source[offset:], directivePrefix) + if i < 0 { + break + } + i++ // skip newline + p := source[offset+i:] + j := bytes.IndexByte(p, '\n') + if j < 0 { + // reached EOF + j = len(p) + } + directive := directiveInfo{ + text: string(p[:j]), + name: string(p[len(directivePrefix)-1 : j]), + offset: offset + i, + } + directives = append(directives, directive) + offset += i + j + } + return directives +} + +// Makes sure that `cover -func=profile.cov` reports accurate coverage. +// Issue #20515. +func TestCoverFunc(t *testing.T) { + t.Parallel() + buildCover(t) + // testcover -func ./testdata/profile.cov + cmd := exec.Command(testcover, "-func", coverProfile) + out, err := cmd.Output() + if err != nil { + if ee, ok := err.(*exec.ExitError); ok { + t.Logf("%s", ee.Stderr) + } + t.Fatal(err) + } + + if got, err := regexp.Match(".*total:.*100.0.*", out); err != nil || !got { + t.Logf("%s", out) + t.Errorf("invalid coverage counts. got=(%v, %v); want=(true; nil)", got, err) + } +} + +// Check that cover produces correct HTML. +// Issue #25767. 
+func TestCoverHTML(t *testing.T) { + t.Parallel() + testenv.MustHaveGoRun(t) + buildCover(t) + + // go test -coverprofile testdata/html/html.cov cmd/cover/testdata/html + cmd := exec.Command(testenv.GoToolPath(t), "test", toolexecArg, "-coverprofile", htmlProfile, "cmd/cover/testdata/html") + run(cmd, t) + // testcover -html testdata/html/html.cov -o testdata/html/html.html + cmd = exec.Command(testcover, "-html", htmlProfile, "-o", htmlHTML) + run(cmd, t) + + // Extract the parts of the HTML with comment markers, + // and compare against a golden file. + entireHTML, err := os.ReadFile(htmlHTML) + if err != nil { + t.Fatal(err) + } + var out bytes.Buffer + scan := bufio.NewScanner(bytes.NewReader(entireHTML)) + in := false + for scan.Scan() { + line := scan.Text() + if strings.Contains(line, "// START") { + in = true + } + if in { + fmt.Fprintln(&out, line) + } + if strings.Contains(line, "// END") { + in = false + } + } + if scan.Err() != nil { + t.Error(scan.Err()) + } + golden, err := os.ReadFile(htmlGolden) + if err != nil { + t.Fatalf("reading golden file: %v", err) + } + // Ignore white space differences. + // Break into lines, then compare by breaking into words. + goldenLines := strings.Split(string(golden), "\n") + outLines := strings.Split(out.String(), "\n") + // Compare at the line level, stopping at first different line so + // we don't generate tons of output if there's an inserted or deleted line. + for i, goldenLine := range goldenLines { + if i >= len(outLines) { + t.Fatalf("output shorter than golden; stops before line %d: %s\n", i+1, goldenLine) + } + // Convert all white space to simple spaces, for easy comparison. + goldenLine = strings.Join(strings.Fields(goldenLine), " ") + outLine := strings.Join(strings.Fields(outLines[i]), " ") + if outLine != goldenLine { + t.Fatalf("line %d differs: got:\n\t%s\nwant:\n\t%s", i+1, outLine, goldenLine) + } + } + if len(goldenLines) != len(outLines) { + t.Fatalf("output longer than golden; first extra output line %d: %q\n", len(goldenLines)+1, outLines[len(goldenLines)]) + } +} + +// Test HTML processing with a source file not run through gofmt. +// Issue #27350. +func TestHtmlUnformatted(t *testing.T) { + t.Parallel() + testenv.MustHaveGoRun(t) + buildCover(t) + + if err := os.Mkdir(htmlUDir, 0777); err != nil { + t.Fatal(err) + } + + if err := os.WriteFile(filepath.Join(htmlUDir, "go.mod"), []byte("module htmlunformatted\n"), 0666); err != nil { + t.Fatal(err) + } + + const htmlUContents = ` +package htmlunformatted + +var g int + +func F() { +//line x.go:1 + { { F(); goto lab } } +lab: +}` + + const htmlUTestContents = `package htmlunformatted` + + if err := os.WriteFile(htmlU, []byte(htmlUContents), 0444); err != nil { + t.Fatal(err) + } + if err := os.WriteFile(htmlUTest, []byte(htmlUTestContents), 0444); err != nil { + t.Fatal(err) + } + + // go test -covermode=count -coverprofile TMPDIR/htmlunformatted.cov + cmd := exec.Command(testenv.GoToolPath(t), "test", toolexecArg, "-covermode=count", "-coverprofile", htmlUProfile) + cmd.Dir = htmlUDir + run(cmd, t) + + // testcover -html TMPDIR/htmlunformatted.cov -o unformatted.html + cmd = exec.Command(testcover, "-html", htmlUProfile, "-o", htmlUHTML) + cmd.Dir = htmlUDir + run(cmd, t) +} + +// lineDupContents becomes linedup.go in TestFuncWithDuplicateLines. 
+const lineDupContents = ` +package linedup + +var G int + +func LineDup(c int) { + for i := 0; i < c; i++ { +//line ld.go:100 + if i % 2 == 0 { + G++ + } + if i % 3 == 0 { + G++; G++ + } +//line ld.go:100 + if i % 4 == 0 { + G++; G++; G++ + } + if i % 5 == 0 { + G++; G++; G++; G++ + } + } +} +` + +// lineDupTestContents becomes linedup_test.go in TestFuncWithDuplicateLines. +const lineDupTestContents = ` +package linedup + +import "testing" + +func TestLineDup(t *testing.T) { + LineDup(100) +} +` + +// Test -func with duplicate //line directives with different numbers +// of statements. +func TestFuncWithDuplicateLines(t *testing.T) { + t.Parallel() + testenv.MustHaveGoRun(t) + buildCover(t) + + if err := os.Mkdir(lineDupDir, 0777); err != nil { + t.Fatal(err) + } + + if err := os.WriteFile(filepath.Join(lineDupDir, "go.mod"), []byte("module linedup\n"), 0666); err != nil { + t.Fatal(err) + } + if err := os.WriteFile(lineDupGo, []byte(lineDupContents), 0444); err != nil { + t.Fatal(err) + } + if err := os.WriteFile(lineDupTestGo, []byte(lineDupTestContents), 0444); err != nil { + t.Fatal(err) + } + + // go test -cover -covermode count -coverprofile TMPDIR/linedup.out + cmd := exec.Command(testenv.GoToolPath(t), "test", toolexecArg, "-cover", "-covermode", "count", "-coverprofile", lineDupProfile) + cmd.Dir = lineDupDir + run(cmd, t) + + // testcover -func=TMPDIR/linedup.out + cmd = exec.Command(testcover, "-func", lineDupProfile) + cmd.Dir = lineDupDir + run(cmd, t) +} + +func run(c *exec.Cmd, t *testing.T) { + t.Helper() + t.Log("running", c.Args) + out, err := c.CombinedOutput() + if len(out) > 0 { + t.Logf("%s", out) + } + if err != nil { + t.Fatal(err) + } +} diff --git a/src/cmd/cover/doc.go b/src/cmd/cover/doc.go new file mode 100644 index 0000000..e2c8494 --- /dev/null +++ b/src/cmd/cover/doc.go @@ -0,0 +1,25 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +/* +Cover is a program for analyzing the coverage profiles generated by +'go test -coverprofile=cover.out'. + +Cover is also used by 'go test -cover' to rewrite the source code with +annotations to track which parts of each function are executed. +It operates on one Go source file at a time, computing approximate +basic block information by studying the source. It is thus more portable +than binary-rewriting coverage tools, but also a little less capable. +For instance, it does not probe inside && and || expressions, and can +be mildly confused by single statements with multiple function literals. + +When computing coverage of a package that uses cgo, the cover tool +must be applied to the output of cgo preprocessing, not the input, +because cover deletes comments that are significant to cgo. + +For usage information, please see: + go help testflag + go tool cover -help +*/ +package main diff --git a/src/cmd/cover/func.go b/src/cmd/cover/func.go new file mode 100644 index 0000000..ce7c771 --- /dev/null +++ b/src/cmd/cover/func.go @@ -0,0 +1,246 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This file implements the visitor that computes the (line, column)-(line-column) range for each function. 
+ +package main + +import ( + "bufio" + "bytes" + "encoding/json" + "errors" + "fmt" + "go/ast" + "go/parser" + "go/token" + exec "internal/execabs" + "io" + "os" + "path" + "path/filepath" + "runtime" + "strings" + "text/tabwriter" +) + +// funcOutput takes two file names as arguments, a coverage profile to read as input and an output +// file to write ("" means to write to standard output). The function reads the profile and produces +// as output the coverage data broken down by function, like this: +// +// fmt/format.go:30: init 100.0% +// fmt/format.go:57: clearflags 100.0% +// ... +// fmt/scan.go:1046: doScan 100.0% +// fmt/scan.go:1075: advance 96.2% +// fmt/scan.go:1119: doScanf 96.8% +// total: (statements) 91.9% + +func funcOutput(profile, outputFile string) error { + profiles, err := ParseProfiles(profile) + if err != nil { + return err + } + + dirs, err := findPkgs(profiles) + if err != nil { + return err + } + + var out *bufio.Writer + if outputFile == "" { + out = bufio.NewWriter(os.Stdout) + } else { + fd, err := os.Create(outputFile) + if err != nil { + return err + } + defer fd.Close() + out = bufio.NewWriter(fd) + } + defer out.Flush() + + tabber := tabwriter.NewWriter(out, 1, 8, 1, '\t', 0) + defer tabber.Flush() + + var total, covered int64 + for _, profile := range profiles { + fn := profile.FileName + file, err := findFile(dirs, fn) + if err != nil { + return err + } + funcs, err := findFuncs(file) + if err != nil { + return err + } + // Now match up functions and profile blocks. + for _, f := range funcs { + c, t := f.coverage(profile) + fmt.Fprintf(tabber, "%s:%d:\t%s\t%.1f%%\n", fn, f.startLine, f.name, percent(c, t)) + total += t + covered += c + } + } + fmt.Fprintf(tabber, "total:\t(statements)\t%.1f%%\n", percent(covered, total)) + + return nil +} + +// findFuncs parses the file and returns a slice of FuncExtent descriptors. +func findFuncs(name string) ([]*FuncExtent, error) { + fset := token.NewFileSet() + parsedFile, err := parser.ParseFile(fset, name, nil, 0) + if err != nil { + return nil, err + } + visitor := &FuncVisitor{ + fset: fset, + name: name, + astFile: parsedFile, + } + ast.Walk(visitor, visitor.astFile) + return visitor.funcs, nil +} + +// FuncExtent describes a function's extent in the source by file and position. +type FuncExtent struct { + name string + startLine int + startCol int + endLine int + endCol int +} + +// FuncVisitor implements the visitor that builds the function position list for a file. +type FuncVisitor struct { + fset *token.FileSet + name string // Name of file. + astFile *ast.File + funcs []*FuncExtent +} + +// Visit implements the ast.Visitor interface. +func (v *FuncVisitor) Visit(node ast.Node) ast.Visitor { + switch n := node.(type) { + case *ast.FuncDecl: + if n.Body == nil { + // Do not count declarations of assembly functions. + break + } + start := v.fset.Position(n.Pos()) + end := v.fset.Position(n.End()) + fe := &FuncExtent{ + name: n.Name.Name, + startLine: start.Line, + startCol: start.Column, + endLine: end.Line, + endCol: end.Column, + } + v.funcs = append(v.funcs, fe) + } + return v +} + +// coverage returns the fraction of the statements in the function that were covered, as a numerator and denominator. +func (f *FuncExtent) coverage(profile *Profile) (num, den int64) { + // We could avoid making this n^2 overall by doing a single scan and annotating the functions, + // but the sizes of the data structures is never very large and the scan is almost instantaneous. 
+ var covered, total int64 + // The blocks are sorted, so we can stop counting as soon as we reach the end of the relevant block. + for _, b := range profile.Blocks { + if b.StartLine > f.endLine || (b.StartLine == f.endLine && b.StartCol >= f.endCol) { + // Past the end of the function. + break + } + if b.EndLine < f.startLine || (b.EndLine == f.startLine && b.EndCol <= f.startCol) { + // Before the beginning of the function + continue + } + total += int64(b.NumStmt) + if b.Count > 0 { + covered += int64(b.NumStmt) + } + } + return covered, total +} + +// Pkg describes a single package, compatible with the JSON output from 'go list'; see 'go help list'. +type Pkg struct { + ImportPath string + Dir string + Error *struct { + Err string + } +} + +func findPkgs(profiles []*Profile) (map[string]*Pkg, error) { + // Run go list to find the location of every package we care about. + pkgs := make(map[string]*Pkg) + var list []string + for _, profile := range profiles { + if strings.HasPrefix(profile.FileName, ".") || filepath.IsAbs(profile.FileName) { + // Relative or absolute path. + continue + } + pkg := path.Dir(profile.FileName) + if _, ok := pkgs[pkg]; !ok { + pkgs[pkg] = nil + list = append(list, pkg) + } + } + + if len(list) == 0 { + return pkgs, nil + } + + // Note: usually run as "go tool cover" in which case $GOROOT is set, + // in which case runtime.GOROOT() does exactly what we want. + goTool := filepath.Join(runtime.GOROOT(), "bin/go") + cmd := exec.Command(goTool, append([]string{"list", "-e", "-json"}, list...)...) + var stderr bytes.Buffer + cmd.Stderr = &stderr + stdout, err := cmd.Output() + if err != nil { + return nil, fmt.Errorf("cannot run go list: %v\n%s", err, stderr.Bytes()) + } + dec := json.NewDecoder(bytes.NewReader(stdout)) + for { + var pkg Pkg + err := dec.Decode(&pkg) + if err == io.EOF { + break + } + if err != nil { + return nil, fmt.Errorf("decoding go list json: %v", err) + } + pkgs[pkg.ImportPath] = &pkg + } + return pkgs, nil +} + +// findFile finds the location of the named file in GOROOT, GOPATH etc. +func findFile(pkgs map[string]*Pkg, file string) (string, error) { + if strings.HasPrefix(file, ".") || filepath.IsAbs(file) { + // Relative or absolute path. + return file, nil + } + pkg := pkgs[path.Dir(file)] + if pkg != nil { + if pkg.Dir != "" { + return filepath.Join(pkg.Dir, path.Base(file)), nil + } + if pkg.Error != nil { + return "", errors.New(pkg.Error.Err) + } + } + return "", fmt.Errorf("did not find package for %s in go list output", file) +} + +func percent(covered, total int64) float64 { + if total == 0 { + total = 1 // Avoid zero denominator. + } + return 100.0 * float64(covered) / float64(total) +} diff --git a/src/cmd/cover/html.go b/src/cmd/cover/html.go new file mode 100644 index 0000000..b2865c4 --- /dev/null +++ b/src/cmd/cover/html.go @@ -0,0 +1,305 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package main + +import ( + "bufio" + "bytes" + "cmd/internal/browser" + "fmt" + "html/template" + "io" + "math" + "os" + "path/filepath" + "strings" +) + +// htmlOutput reads the profile data from profile and generates an HTML +// coverage report, writing it to outfile. If outfile is empty, +// it writes the report to a temporary file and opens it in a web browser. 
+func htmlOutput(profile, outfile string) error { + profiles, err := ParseProfiles(profile) + if err != nil { + return err + } + + var d templateData + + dirs, err := findPkgs(profiles) + if err != nil { + return err + } + + for _, profile := range profiles { + fn := profile.FileName + if profile.Mode == "set" { + d.Set = true + } + file, err := findFile(dirs, fn) + if err != nil { + return err + } + src, err := os.ReadFile(file) + if err != nil { + return fmt.Errorf("can't read %q: %v", fn, err) + } + var buf strings.Builder + err = htmlGen(&buf, src, profile.Boundaries(src)) + if err != nil { + return err + } + d.Files = append(d.Files, &templateFile{ + Name: fn, + Body: template.HTML(buf.String()), + Coverage: percentCovered(profile), + }) + } + + var out *os.File + if outfile == "" { + var dir string + dir, err = os.MkdirTemp("", "cover") + if err != nil { + return err + } + out, err = os.Create(filepath.Join(dir, "coverage.html")) + } else { + out, err = os.Create(outfile) + } + if err != nil { + return err + } + err = htmlTemplate.Execute(out, d) + if err2 := out.Close(); err == nil { + err = err2 + } + if err != nil { + return err + } + + if outfile == "" { + if !browser.Open("file://" + out.Name()) { + fmt.Fprintf(os.Stderr, "HTML output written to %s\n", out.Name()) + } + } + + return nil +} + +// percentCovered returns, as a percentage, the fraction of the statements in +// the profile covered by the test run. +// In effect, it reports the coverage of a given source file. +func percentCovered(p *Profile) float64 { + var total, covered int64 + for _, b := range p.Blocks { + total += int64(b.NumStmt) + if b.Count > 0 { + covered += int64(b.NumStmt) + } + } + if total == 0 { + return 0 + } + return float64(covered) / float64(total) * 100 +} + +// htmlGen generates an HTML coverage report with the provided filename, +// source code, and tokens, and writes it to the given Writer. +func htmlGen(w io.Writer, src []byte, boundaries []Boundary) error { + dst := bufio.NewWriter(w) + for i := range src { + for len(boundaries) > 0 && boundaries[0].Offset == i { + b := boundaries[0] + if b.Start { + n := 0 + if b.Count > 0 { + n = int(math.Floor(b.Norm*9)) + 1 + } + fmt.Fprintf(dst, `<span class="cov%v" title="%v">`, n, b.Count) + } else { + dst.WriteString("</span>") + } + boundaries = boundaries[1:] + } + switch b := src[i]; b { + case '>': + dst.WriteString(">") + case '<': + dst.WriteString("<") + case '&': + dst.WriteString("&") + case '\t': + dst.WriteString(" ") + default: + dst.WriteByte(b) + } + } + return dst.Flush() +} + +// rgb returns an rgb value for the specified coverage value +// between 0 (no coverage) and 10 (max coverage). +func rgb(n int) string { + if n == 0 { + return "rgb(192, 0, 0)" // Red + } + // Gradient from gray to green. + r := 128 - 12*(n-1) + g := 128 + 12*(n-1) + b := 128 + 3*(n-1) + return fmt.Sprintf("rgb(%v, %v, %v)", r, g, b) +} + +// colors generates the CSS rules for coverage colors. +func colors() template.CSS { + var buf bytes.Buffer + for i := 0; i < 11; i++ { + fmt.Fprintf(&buf, ".cov%v { color: %v }\n", i, rgb(i)) + } + return template.CSS(buf.String()) +} + +var htmlTemplate = template.Must(template.New("html").Funcs(template.FuncMap{ + "colors": colors, +}).Parse(tmplHTML)) + +type templateData struct { + Files []*templateFile + Set bool +} + +// PackageName returns a name for the package being shown. +// It does this by choosing the penultimate element of the path +// name, so foo.bar/baz/foo.go chooses 'baz'. 
This is cheap +// and easy, avoids parsing the Go file, and gets a better answer +// for package main. It returns the empty string if there is +// a problem. +func (td templateData) PackageName() string { + if len(td.Files) == 0 { + return "" + } + fileName := td.Files[0].Name + elems := strings.Split(fileName, "/") // Package path is always slash-separated. + // Return the penultimate non-empty element. + for i := len(elems) - 2; i >= 0; i-- { + if elems[i] != "" { + return elems[i] + } + } + return "" +} + +type templateFile struct { + Name string + Body template.HTML + Coverage float64 +} + +const tmplHTML = ` +<!DOCTYPE html> +<html> + <head> + <meta http-equiv="Content-Type" content="text/html; charset=utf-8"> + <title>{{$pkg := .PackageName}}{{if $pkg}}{{$pkg}}: {{end}}Go Coverage Report</title> + <style> + body { + background: black; + color: rgb(80, 80, 80); + } + body, pre, #legend span { + font-family: Menlo, monospace; + font-weight: bold; + } + #topbar { + background: black; + position: fixed; + top: 0; left: 0; right: 0; + height: 42px; + border-bottom: 1px solid rgb(80, 80, 80); + } + #content { + margin-top: 50px; + } + #nav, #legend { + float: left; + margin-left: 10px; + } + #legend { + margin-top: 12px; + } + #nav { + margin-top: 10px; + } + #legend span { + margin: 0 5px; + } + {{colors}} + </style> + </head> + <body> + <div id="topbar"> + <div id="nav"> + <select id="files"> + {{range $i, $f := .Files}} + <option value="file{{$i}}">{{$f.Name}} ({{printf "%.1f" $f.Coverage}}%)</option> + {{end}} + </select> + </div> + <div id="legend"> + <span>not tracked</span> + {{if .Set}} + <span class="cov0">not covered</span> + <span class="cov8">covered</span> + {{else}} + <span class="cov0">no coverage</span> + <span class="cov1">low coverage</span> + <span class="cov2">*</span> + <span class="cov3">*</span> + <span class="cov4">*</span> + <span class="cov5">*</span> + <span class="cov6">*</span> + <span class="cov7">*</span> + <span class="cov8">*</span> + <span class="cov9">*</span> + <span class="cov10">high coverage</span> + {{end}} + </div> + </div> + <div id="content"> + {{range $i, $f := .Files}} + <pre class="file" id="file{{$i}}" style="display: none">{{$f.Body}}</pre> + {{end}} + </div> + </body> + <script> + (function() { + var files = document.getElementById('files'); + var visible; + files.addEventListener('change', onChange, false); + function select(part) { + if (visible) + visible.style.display = 'none'; + visible = document.getElementById(part); + if (!visible) + return; + files.value = part; + visible.style.display = 'block'; + location.hash = part; + } + function onChange() { + select(files.value); + window.scrollTo(0, 0); + } + if (location.hash != "") { + select(location.hash.substr(1)); + } + if (!visible) { + select("file0"); + } + })(); + </script> +</html> +` diff --git a/src/cmd/cover/pkgname_test.go b/src/cmd/cover/pkgname_test.go new file mode 100644 index 0000000..1c731ad --- /dev/null +++ b/src/cmd/cover/pkgname_test.go @@ -0,0 +1,31 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package main + +import "testing" + +func TestPackageName(t *testing.T) { + var tests = []struct { + fileName, pkgName string + }{ + {"", ""}, + {"///", ""}, + {"fmt", ""}, // No Go file, improper form. 
+ {"fmt/foo.go", "fmt"}, + {"encoding/binary/foo.go", "binary"}, + {"encoding/binary/////foo.go", "binary"}, + } + var tf templateFile + for _, test := range tests { + tf.Name = test.fileName + td := templateData{ + Files: []*templateFile{&tf}, + } + got := td.PackageName() + if got != test.pkgName { + t.Errorf("%s: got %s want %s", test.fileName, got, test.pkgName) + } + } +} diff --git a/src/cmd/cover/profile.go b/src/cmd/cover/profile.go new file mode 100644 index 0000000..656c862 --- /dev/null +++ b/src/cmd/cover/profile.go @@ -0,0 +1,220 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This file provides support for parsing coverage profiles +// generated by "go test -coverprofile=cover.out". +// It is a copy of golang.org/x/tools/cover/profile.go. + +package main + +import ( + "bufio" + "fmt" + "math" + "os" + "regexp" + "sort" + "strconv" + "strings" +) + +// Profile represents the profiling data for a specific file. +type Profile struct { + FileName string + Mode string + Blocks []ProfileBlock +} + +// ProfileBlock represents a single block of profiling data. +type ProfileBlock struct { + StartLine, StartCol int + EndLine, EndCol int + NumStmt, Count int +} + +type byFileName []*Profile + +func (p byFileName) Len() int { return len(p) } +func (p byFileName) Less(i, j int) bool { return p[i].FileName < p[j].FileName } +func (p byFileName) Swap(i, j int) { p[i], p[j] = p[j], p[i] } + +// ParseProfiles parses profile data in the specified file and returns a +// Profile for each source file described therein. +func ParseProfiles(fileName string) ([]*Profile, error) { + pf, err := os.Open(fileName) + if err != nil { + return nil, err + } + defer pf.Close() + + files := make(map[string]*Profile) + buf := bufio.NewReader(pf) + // First line is "mode: foo", where foo is "set", "count", or "atomic". + // Rest of file is in the format + // encoding/base64/base64.go:34.44,37.40 3 1 + // where the fields are: name.go:line.column,line.column numberOfStatements count + s := bufio.NewScanner(buf) + mode := "" + for s.Scan() { + line := s.Text() + if mode == "" { + const p = "mode: " + if !strings.HasPrefix(line, p) || line == p { + return nil, fmt.Errorf("bad mode line: %v", line) + } + mode = line[len(p):] + continue + } + m := lineRe.FindStringSubmatch(line) + if m == nil { + return nil, fmt.Errorf("line %q doesn't match expected format: %v", m, lineRe) + } + fn := m[1] + p := files[fn] + if p == nil { + p = &Profile{ + FileName: fn, + Mode: mode, + } + files[fn] = p + } + p.Blocks = append(p.Blocks, ProfileBlock{ + StartLine: toInt(m[2]), + StartCol: toInt(m[3]), + EndLine: toInt(m[4]), + EndCol: toInt(m[5]), + NumStmt: toInt(m[6]), + Count: toInt(m[7]), + }) + } + if err := s.Err(); err != nil { + return nil, err + } + for _, p := range files { + sort.Sort(blocksByStart(p.Blocks)) + // Merge samples from the same location. + j := 1 + for i := 1; i < len(p.Blocks); i++ { + b := p.Blocks[i] + last := p.Blocks[j-1] + if b.StartLine == last.StartLine && + b.StartCol == last.StartCol && + b.EndLine == last.EndLine && + b.EndCol == last.EndCol { + if b.NumStmt != last.NumStmt { + return nil, fmt.Errorf("inconsistent NumStmt: changed from %d to %d", last.NumStmt, b.NumStmt) + } + if mode == "set" { + p.Blocks[j-1].Count |= b.Count + } else { + p.Blocks[j-1].Count += b.Count + } + continue + } + p.Blocks[j] = b + j++ + } + p.Blocks = p.Blocks[:j] + } + // Generate a sorted slice. 
+ profiles := make([]*Profile, 0, len(files)) + for _, profile := range files { + profiles = append(profiles, profile) + } + sort.Sort(byFileName(profiles)) + return profiles, nil +} + +type blocksByStart []ProfileBlock + +func (b blocksByStart) Len() int { return len(b) } +func (b blocksByStart) Swap(i, j int) { b[i], b[j] = b[j], b[i] } +func (b blocksByStart) Less(i, j int) bool { + bi, bj := b[i], b[j] + return bi.StartLine < bj.StartLine || bi.StartLine == bj.StartLine && bi.StartCol < bj.StartCol +} + +var lineRe = regexp.MustCompile(`^(.+):([0-9]+).([0-9]+),([0-9]+).([0-9]+) ([0-9]+) ([0-9]+)$`) + +func toInt(s string) int { + i, err := strconv.Atoi(s) + if err != nil { + panic(err) + } + return i +} + +// Boundary represents the position in a source file of the beginning or end of a +// block as reported by the coverage profile. In HTML mode, it will correspond to +// the opening or closing of a <span> tag and will be used to colorize the source +type Boundary struct { + Offset int // Location as a byte offset in the source file. + Start bool // Is this the start of a block? + Count int // Event count from the cover profile. + Norm float64 // Count normalized to [0..1]. + Index int // Order in input file. +} + +// Boundaries returns a Profile as a set of Boundary objects within the provided src. +func (p *Profile) Boundaries(src []byte) (boundaries []Boundary) { + // Find maximum count. + max := 0 + for _, b := range p.Blocks { + if b.Count > max { + max = b.Count + } + } + // Divisor for normalization. + divisor := math.Log(float64(max)) + + // boundary returns a Boundary, populating the Norm field with a normalized Count. + index := 0 + boundary := func(offset int, start bool, count int) Boundary { + b := Boundary{Offset: offset, Start: start, Count: count, Index: index} + index++ + if !start || count == 0 { + return b + } + if max <= 1 { + b.Norm = 0.8 // Profile is in "set" mode; we want a heat map. Use cov8 in the CSS. + } else if count > 0 { + b.Norm = math.Log(float64(count)) / divisor + } + return b + } + + line, col := 1, 2 // TODO: Why is this 2? + for si, bi := 0, 0; si < len(src) && bi < len(p.Blocks); { + b := p.Blocks[bi] + if b.StartLine == line && b.StartCol == col { + boundaries = append(boundaries, boundary(si, true, b.Count)) + } + if b.EndLine == line && b.EndCol == col || line > b.EndLine { + boundaries = append(boundaries, boundary(si, false, 0)) + bi++ + continue // Don't advance through src; maybe the next block starts here. + } + if src[si] == '\n' { + line++ + col = 0 + } + col++ + si++ + } + sort.Sort(boundariesByPos(boundaries)) + return +} + +type boundariesByPos []Boundary + +func (b boundariesByPos) Len() int { return len(b) } +func (b boundariesByPos) Swap(i, j int) { b[i], b[j] = b[j], b[i] } +func (b boundariesByPos) Less(i, j int) bool { + if b[i].Offset == b[j].Offset { + // Boundaries at the same offset should be ordered according to + // their original position. + return b[i].Index < b[j].Index + } + return b[i].Offset < b[j].Offset +} diff --git a/src/cmd/cover/testdata/directives.go b/src/cmd/cover/testdata/directives.go new file mode 100644 index 0000000..dfb7b8e --- /dev/null +++ b/src/cmd/cover/testdata/directives.go @@ -0,0 +1,40 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This file is processed by the cover command, then a test verifies that +// all compiler directives are preserved and positioned appropriately. 
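+// For example, //go:c2 below is written directly above func c and so belongs
+// to its doc comment, while //go:c1 is separated from the declaration by a
+// blank line; the rewritten source is expected to keep every directive and to
+// leave detached directives detached rather than folding them into a
+// declaration's doc comment.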
+ +//go:a + +//go:b +package main + +//go:c1 + +//go:c2 +//doc +func c() { +} + +//go:d1 + +//doc +//go:d2 +type d int + +//go:e1 + +//doc +//go:e2 +type ( + e int + f int +) + +//go:_empty1 +//doc +//go:_empty2 +type () + +//go:f diff --git a/src/cmd/cover/testdata/html/html.go b/src/cmd/cover/testdata/html/html.go new file mode 100644 index 0000000..2057825 --- /dev/null +++ b/src/cmd/cover/testdata/html/html.go @@ -0,0 +1,30 @@ +package html + +import "fmt" + +// This file is tested by html_test.go. +// The comments below are markers for extracting the annotated source +// from the HTML output. + +// This is a regression test for incorrect sorting of boundaries +// that coincide, specifically for empty select clauses. +// START f +func f() { + ch := make(chan int) + select { + case <-ch: + default: + } +} + +// END f + +// https://golang.org/issue/25767 +// START g +func g() { + if false { + fmt.Printf("Hello") + } +} + +// END g diff --git a/src/cmd/cover/testdata/html/html.golden b/src/cmd/cover/testdata/html/html.golden new file mode 100644 index 0000000..84377d1 --- /dev/null +++ b/src/cmd/cover/testdata/html/html.golden @@ -0,0 +1,18 @@ +// START f +func f() <span class="cov8" title="1">{ + ch := make(chan int) + select </span>{ + case <-ch:<span class="cov0" title="0"></span> + default:<span class="cov8" title="1"></span> + } +} + +// END f +// START g +func g() <span class="cov8" title="1">{ + if false </span><span class="cov0" title="0">{ + fmt.Printf("Hello") + }</span> +} + +// END g diff --git a/src/cmd/cover/testdata/html/html_test.go b/src/cmd/cover/testdata/html/html_test.go new file mode 100644 index 0000000..c15561f --- /dev/null +++ b/src/cmd/cover/testdata/html/html_test.go @@ -0,0 +1,8 @@ +package html + +import "testing" + +func TestAll(t *testing.T) { + f() + g() +} diff --git a/src/cmd/cover/testdata/main.go b/src/cmd/cover/testdata/main.go new file mode 100644 index 0000000..be74b4a --- /dev/null +++ b/src/cmd/cover/testdata/main.go @@ -0,0 +1,116 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Test runner for coverage test. This file is not coverage-annotated; test.go is. +// It knows the coverage counter is called +// "thisNameMustBeVeryLongToCauseOverflowOfCounterIncrementStatementOntoNextLineForTest". + +package main + +import ( + "fmt" + "os" +) + +func main() { + testAll() + verify() +} + +type block struct { + count uint32 + line uint32 +} + +var counters = make(map[block]bool) + +// shorthand for the long counter variable. +var coverTest = &thisNameMustBeVeryLongToCauseOverflowOfCounterIncrementStatementOntoNextLineForTest + +// check records the location and expected value for a counter. +func check(line, count uint32) { + b := block{ + count, + line, + } + counters[b] = true +} + +// checkVal is a version of check that returns its extra argument, +// so it can be used in conditionals. +func checkVal(line, count uint32, val int) int { + b := block{ + count, + line, + } + counters[b] = true + return val +} + +var PASS = true + +// verify checks the expected counts against the actual. It runs after the test has completed. 
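+// For each block recorded by check(LINE, n), it looks up the actual counter
+// covering that line with count() and reports any mismatch; blocks recorded
+// with the sentinel value "anything" are only required to have a nonzero count.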
+func verify() { + for b := range counters { + got, index := count(b.line) + if b.count == anything && got != 0 { + got = anything + } + if got != b.count { + fmt.Fprintf(os.Stderr, "test_go:%d expected count %d got %d [counter %d]\n", b.line, b.count, got, index) + PASS = false + } + } + verifyPanic() + if !PASS { + fmt.Fprintf(os.Stderr, "FAIL\n") + os.Exit(2) + } +} + +// verifyPanic is a special check for the known counter that should be +// after the panic call in testPanic. +func verifyPanic() { + if coverTest.Count[panicIndex-1] != 1 { + // Sanity check for test before panic. + fmt.Fprintf(os.Stderr, "bad before panic") + PASS = false + } + if coverTest.Count[panicIndex] != 0 { + fmt.Fprintf(os.Stderr, "bad at panic: %d should be 0\n", coverTest.Count[panicIndex]) + PASS = false + } + if coverTest.Count[panicIndex+1] != 1 { + fmt.Fprintf(os.Stderr, "bad after panic") + PASS = false + } +} + +// count returns the count and index for the counter at the specified line. +func count(line uint32) (uint32, int) { + // Linear search is fine. Choose perfect fit over approximate. + // We can have a closing brace for a range on the same line as a condition for an "else if" + // and we don't want that brace to steal the count for the condition on the "if". + // Therefore we test for a perfect (lo==line && hi==line) match, but if we can't + // find that we take the first imperfect match. + index := -1 + indexLo := uint32(1e9) + for i := range coverTest.Count { + lo, hi := coverTest.Pos[3*i], coverTest.Pos[3*i+1] + if lo == line && line == hi { + return coverTest.Count[i], i + } + // Choose the earliest match (the counters are in unpredictable order). + if lo <= line && line <= hi && indexLo > lo { + index = i + indexLo = lo + } + } + if index == -1 { + fmt.Fprintln(os.Stderr, "cover_test: no counter for line", line) + PASS = false + return 0, 0 + } + return coverTest.Count[index], index +} diff --git a/src/cmd/cover/testdata/p.go b/src/cmd/cover/testdata/p.go new file mode 100644 index 0000000..ce3a8c0 --- /dev/null +++ b/src/cmd/cover/testdata/p.go @@ -0,0 +1,27 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// A package such that there are 3 functions with zero total and covered lines. +// And one with 1 total and covered lines. Reproduces issue #20515. +package p + +//go:noinline +func A() { + +} + +//go:noinline +func B() { + +} + +//go:noinline +func C() { + +} + +//go:noinline +func D() int64 { + return 42 +} diff --git a/src/cmd/cover/testdata/profile.cov b/src/cmd/cover/testdata/profile.cov new file mode 100644 index 0000000..db08602 --- /dev/null +++ b/src/cmd/cover/testdata/profile.cov @@ -0,0 +1,5 @@ +mode: set +./testdata/p.go:10.10,12.2 0 0 +./testdata/p.go:15.10,17.2 0 0 +./testdata/p.go:20.10,22.2 0 0 +./testdata/p.go:25.16,27.2 1 1 diff --git a/src/cmd/cover/testdata/test.go b/src/cmd/cover/testdata/test.go new file mode 100644 index 0000000..b794962 --- /dev/null +++ b/src/cmd/cover/testdata/test.go @@ -0,0 +1,299 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This program is processed by the cover command, and then testAll is called. +// The test driver in main.go can then compare the coverage statistics with expectation. + +// The word LINE is replaced by the line number in this file. 
When the file is executed, +// the coverage processing has changed the line numbers, so we can't use runtime.Caller. + +package main + +import _ "unsafe" // for go:linkname + +//go:linkname some_name some_name + +const anything = 1e9 // Just some unlikely value that means "we got here, don't care how often" + +func testAll() { + testSimple() + testBlockRun() + testIf() + testFor() + testRange() + testSwitch() + testTypeSwitch() + testSelect1() + testSelect2() + testPanic() + testEmptySwitches() + testFunctionLiteral() + testGoto() +} + +// The indexes of the counters in testPanic are known to main.go +const panicIndex = 3 + +// This test appears first because the index of its counters is known to main.go +func testPanic() { + defer func() { + recover() + }() + check(LINE, 1) + panic("should not get next line") + check(LINE, 0) // this is GoCover.Count[panicIndex] + // The next counter is in testSimple and it will be non-zero. + // If the panic above does not trigger a counter, the test will fail + // because GoCover.Count[panicIndex] will be the one in testSimple. +} + +func testSimple() { + check(LINE, 1) +} + +func testIf() { + if true { + check(LINE, 1) + } else { + check(LINE, 0) + } + if false { + check(LINE, 0) + } else { + check(LINE, 1) + } + for i := 0; i < 3; i++ { + if checkVal(LINE, 3, i) <= 2 { + check(LINE, 3) + } + if checkVal(LINE, 3, i) <= 1 { + check(LINE, 2) + } + if checkVal(LINE, 3, i) <= 0 { + check(LINE, 1) + } + } + for i := 0; i < 3; i++ { + if checkVal(LINE, 3, i) <= 1 { + check(LINE, 2) + } else { + check(LINE, 1) + } + } + for i := 0; i < 3; i++ { + if checkVal(LINE, 3, i) <= 0 { + check(LINE, 1) + } else if checkVal(LINE, 2, i) <= 1 { + check(LINE, 1) + } else if checkVal(LINE, 1, i) <= 2 { + check(LINE, 1) + } else if checkVal(LINE, 0, i) <= 3 { + check(LINE, 0) + } + } + if func(a, b int) bool { return a < b }(3, 4) { + check(LINE, 1) + } +} + +func testFor() { + for i := 0; i < 10; func() { i++; check(LINE, 10) }() { + check(LINE, 10) + } +} + +func testRange() { + for _, f := range []func(){ + func() { check(LINE, 1) }, + } { + f() + check(LINE, 1) + } +} + +func testBlockRun() { + check(LINE, 1) + { + check(LINE, 1) + } + { + check(LINE, 1) + } + check(LINE, 1) + { + check(LINE, 1) + } + { + check(LINE, 1) + } + check(LINE, 1) +} + +func testSwitch() { + for i := 0; i < 5; func() { i++; check(LINE, 5) }() { + goto label2 + label1: + goto label1 + label2: + switch i { + case 0: + check(LINE, 1) + case 1: + check(LINE, 1) + case 2: + check(LINE, 1) + default: + check(LINE, 2) + } + } +} + +func testTypeSwitch() { + var x = []interface{}{1, 2.0, "hi"} + for _, v := range x { + switch func() { check(LINE, 3) }(); v.(type) { + case int: + check(LINE, 1) + case float64: + check(LINE, 1) + case string: + check(LINE, 1) + case complex128: + check(LINE, 0) + default: + check(LINE, 0) + } + } +} + +func testSelect1() { + c := make(chan int) + go func() { + for i := 0; i < 1000; i++ { + c <- i + } + }() + for { + select { + case <-c: + check(LINE, anything) + case <-c: + check(LINE, anything) + default: + check(LINE, 1) + return + } + } +} + +func testSelect2() { + c1 := make(chan int, 1000) + c2 := make(chan int, 1000) + for i := 0; i < 1000; i++ { + c1 <- i + c2 <- i + } + for { + select { + case <-c1: + check(LINE, 1000) + case <-c2: + check(LINE, 1000) + default: + check(LINE, 1) + return + } + } +} + +// Empty control statements created syntax errors. This function +// is here just to be sure that those are handled correctly now. 
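+// ("Empty" means a switch or select with no case clauses at all, as in the
+// bodies below; the annotated source once failed to compile for such statements.)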
+func testEmptySwitches() { + check(LINE, 1) + switch 3 { + } + check(LINE, 1) + switch i := (interface{})(3).(int); i { + } + check(LINE, 1) + c := make(chan int) + go func() { + check(LINE, 1) + c <- 1 + select {} + }() + <-c + check(LINE, 1) +} + +func testFunctionLiteral() { + a := func(f func()) error { + f() + f() + return nil + } + + b := func(f func()) bool { + f() + f() + return true + } + + check(LINE, 1) + a(func() { + check(LINE, 2) + }) + + if err := a(func() { + check(LINE, 2) + }); err != nil { + } + + switch b(func() { + check(LINE, 2) + }) { + } + + x := 2 + switch x { + case func() int { check(LINE, 1); return 1 }(): + check(LINE, 0) + panic("2=1") + case func() int { check(LINE, 1); return 2 }(): + check(LINE, 1) + case func() int { check(LINE, 0); return 3 }(): + check(LINE, 0) + panic("2=3") + } +} + +func testGoto() { + for i := 0; i < 2; i++ { + if i == 0 { + goto Label + } + check(LINE, 1) + Label: + check(LINE, 2) + } + // Now test that we don't inject empty statements + // between a label and a loop. +loop: + for { + check(LINE, 1) + break loop + } +} + +// This comment didn't appear in generated go code. +func haha() { + // Needed for cover to add counter increment here. + _ = 42 +} + +// Some someFunction. +// +//go:nosplit +func someFunction() { +} diff --git a/src/cmd/cover/testdata/toolexec.go b/src/cmd/cover/testdata/toolexec.go new file mode 100644 index 0000000..386de79 --- /dev/null +++ b/src/cmd/cover/testdata/toolexec.go @@ -0,0 +1,33 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// The toolexec program is a helper program for cmd/cover tests. +// It is used so that the go tool will call the newly built version +// of the cover program, rather than the installed one. +// +// The tests arrange to run the go tool with the argument +// -toolexec="/path/to/toolexec /path/to/testcover" +// The go tool will invoke this program (compiled into /path/to/toolexec) +// with the arguments shown above followed by the command to run. +// This program will check whether it is expected to run the cover +// program, and if so replace it with /path/to/testcover. +package main + +import ( + "os" + exec "internal/execabs" + "strings" +) + +func main() { + if strings.HasSuffix(strings.TrimSuffix(os.Args[2], ".exe"), "cover") { + os.Args[2] = os.Args[1] + } + cmd := exec.Command(os.Args[2], os.Args[3:]...) + cmd.Stdout = os.Stdout + cmd.Stderr = os.Stderr + if err := cmd.Run(); err != nil { + os.Exit(1) + } +} |
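For illustration (the placeholder paths are the ones already used in the file comment above): with -toolexec set, the go command runs each toolchain program through the wrapper command, so this program is started roughly as

	/path/to/toolexec /path/to/testcover $GOROOT/pkg/tool/<goos_goarch>/cover <arguments for cover>

which means os.Args[1] names the replacement cover binary and os.Args[2] names the tool the go command wanted to run; rewriting os.Args[2] before re-running the command is what makes the tests exercise the newly built cover rather than the installed one.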