author    Daniel Baumann <daniel.baumann@progress-linux.org>    2024-04-28 13:14:23 +0000
committer Daniel Baumann <daniel.baumann@progress-linux.org>    2024-04-28 13:14:23 +0000
commit    73df946d56c74384511a194dd01dbe099584fd1a (patch)
tree      fd0bcea490dd81327ddfbb31e215439672c9a068 /src/cmd/cover/profile.go
parent    Initial commit. (diff)
Adding upstream version 1.16.10. (upstream/1.16.10, upstream)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'src/cmd/cover/profile.go')
-rw-r--r--  src/cmd/cover/profile.go  220
1 file changed, 220 insertions(+), 0 deletions(-)
diff --git a/src/cmd/cover/profile.go b/src/cmd/cover/profile.go
new file mode 100644
index 0000000..656c862
--- /dev/null
+++ b/src/cmd/cover/profile.go
@@ -0,0 +1,220 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file provides support for parsing coverage profiles
+// generated by "go test -coverprofile=cover.out".
+// It is a copy of golang.org/x/tools/cover/profile.go.
+
+package main
+
+import (
+ "bufio"
+ "fmt"
+ "math"
+ "os"
+ "regexp"
+ "sort"
+ "strconv"
+ "strings"
+)
+
+// Profile represents the profiling data for a specific file.
+type Profile struct {
+ FileName string
+ Mode string
+ Blocks []ProfileBlock
+}
+
+// ProfileBlock represents a single block of profiling data.
+type ProfileBlock struct {
+ StartLine, StartCol int
+ EndLine, EndCol int
+ NumStmt, Count int
+}
+
+type byFileName []*Profile
+
+func (p byFileName) Len() int { return len(p) }
+func (p byFileName) Less(i, j int) bool { return p[i].FileName < p[j].FileName }
+func (p byFileName) Swap(i, j int) { p[i], p[j] = p[j], p[i] }
+
+// ParseProfiles parses profile data in the specified file and returns a
+// Profile for each source file described therein.
+func ParseProfiles(fileName string) ([]*Profile, error) {
+ pf, err := os.Open(fileName)
+ if err != nil {
+ return nil, err
+ }
+ defer pf.Close()
+
+ files := make(map[string]*Profile)
+ buf := bufio.NewReader(pf)
+ // First line is "mode: foo", where foo is "set", "count", or "atomic".
+ // Rest of file is in the format
+ // encoding/base64/base64.go:34.44,37.40 3 1
+ // where the fields are: name.go:line.column,line.column numberOfStatements count
+ s := bufio.NewScanner(buf)
+ mode := ""
+ for s.Scan() {
+ line := s.Text()
+ if mode == "" {
+ const p = "mode: "
+ if !strings.HasPrefix(line, p) || line == p {
+ return nil, fmt.Errorf("bad mode line: %v", line)
+ }
+ mode = line[len(p):]
+ continue
+ }
+ m := lineRe.FindStringSubmatch(line)
+ if m == nil {
+ return nil, fmt.Errorf("line %q doesn't match expected format: %v", m, lineRe)
+ }
+ fn := m[1]
+ p := files[fn]
+ if p == nil {
+ p = &Profile{
+ FileName: fn,
+ Mode: mode,
+ }
+ files[fn] = p
+ }
+ p.Blocks = append(p.Blocks, ProfileBlock{
+ StartLine: toInt(m[2]),
+ StartCol: toInt(m[3]),
+ EndLine: toInt(m[4]),
+ EndCol: toInt(m[5]),
+ NumStmt: toInt(m[6]),
+ Count: toInt(m[7]),
+ })
+ }
+ if err := s.Err(); err != nil {
+ return nil, err
+ }
+ for _, p := range files {
+ sort.Sort(blocksByStart(p.Blocks))
+ // Merge samples from the same location.
+ j := 1
+ for i := 1; i < len(p.Blocks); i++ {
+ b := p.Blocks[i]
+ last := p.Blocks[j-1]
+ if b.StartLine == last.StartLine &&
+ b.StartCol == last.StartCol &&
+ b.EndLine == last.EndLine &&
+ b.EndCol == last.EndCol {
+ if b.NumStmt != last.NumStmt {
+ return nil, fmt.Errorf("inconsistent NumStmt: changed from %d to %d", last.NumStmt, b.NumStmt)
+ }
+ if mode == "set" {
+ p.Blocks[j-1].Count |= b.Count
+ } else {
+ p.Blocks[j-1].Count += b.Count
+ }
+ continue
+ }
+ p.Blocks[j] = b
+ j++
+ }
+ p.Blocks = p.Blocks[:j]
+ }
+ // Generate a sorted slice.
+ profiles := make([]*Profile, 0, len(files))
+ for _, profile := range files {
+ profiles = append(profiles, profile)
+ }
+ sort.Sort(byFileName(profiles))
+ return profiles, nil
+}
+
+type blocksByStart []ProfileBlock
+
+func (b blocksByStart) Len() int { return len(b) }
+func (b blocksByStart) Swap(i, j int) { b[i], b[j] = b[j], b[i] }
+func (b blocksByStart) Less(i, j int) bool {
+ bi, bj := b[i], b[j]
+ return bi.StartLine < bj.StartLine || bi.StartLine == bj.StartLine && bi.StartCol < bj.StartCol
+}
+
+var lineRe = regexp.MustCompile(`^(.+):([0-9]+).([0-9]+),([0-9]+).([0-9]+) ([0-9]+) ([0-9]+)$`)
+
+func toInt(s string) int {
+ i, err := strconv.Atoi(s)
+ if err != nil {
+ panic(err)
+ }
+ return i
+}
+
+// Boundary represents the position in a source file of the beginning or end of a
+// block as reported by the coverage profile. In HTML mode, it will correspond to
+// the opening or closing of a <span> tag and will be used to colorize the source
+type Boundary struct {
+ Offset int // Location as a byte offset in the source file.
+ Start bool // Is this the start of a block?
+ Count int // Event count from the cover profile.
+ Norm float64 // Count normalized to [0..1].
+ Index int // Order in input file.
+}
+
+// Boundaries returns a Profile as a set of Boundary objects within the provided src.
+func (p *Profile) Boundaries(src []byte) (boundaries []Boundary) {
+ // Find maximum count.
+ max := 0
+ for _, b := range p.Blocks {
+ if b.Count > max {
+ max = b.Count
+ }
+ }
+ // Divisor for normalization.
+ divisor := math.Log(float64(max))
+
+ // boundary returns a Boundary, populating the Norm field with a normalized Count.
+ index := 0
+ boundary := func(offset int, start bool, count int) Boundary {
+ b := Boundary{Offset: offset, Start: start, Count: count, Index: index}
+ index++
+ if !start || count == 0 {
+ return b
+ }
+ if max <= 1 {
+ b.Norm = 0.8 // Profile is in "set" mode; we want a heat map. Use cov8 in the CSS.
+ } else if count > 0 {
+ b.Norm = math.Log(float64(count)) / divisor
+ }
+ return b
+ }
+
+ line, col := 1, 2 // TODO: Why is this 2?
+ for si, bi := 0, 0; si < len(src) && bi < len(p.Blocks); {
+ b := p.Blocks[bi]
+ if b.StartLine == line && b.StartCol == col {
+ boundaries = append(boundaries, boundary(si, true, b.Count))
+ }
+ if b.EndLine == line && b.EndCol == col || line > b.EndLine {
+ boundaries = append(boundaries, boundary(si, false, 0))
+ bi++
+ continue // Don't advance through src; maybe the next block starts here.
+ }
+ if src[si] == '\n' {
+ line++
+ col = 0
+ }
+ col++
+ si++
+ }
+ sort.Sort(boundariesByPos(boundaries))
+ return
+}
+
+type boundariesByPos []Boundary
+
+func (b boundariesByPos) Len() int { return len(b) }
+func (b boundariesByPos) Swap(i, j int) { b[i], b[j] = b[j], b[i] }
+func (b boundariesByPos) Less(i, j int) bool {
+ if b[i].Offset == b[j].Offset {
+ // Boundaries at the same offset should be ordered according to
+ // their original position.
+ return b[i].Index < b[j].Index
+ }
+ return b[i].Offset < b[j].Offset
+}
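
For reference, a minimal usage sketch of the API added above (not part of the commit): given a cover.out produced by "go test -coverprofile=cover.out", whose data lines look like "encoding/base64/base64.go:34.44,37.40 3 1", the snippet below parses it with ParseProfiles and derives Boundary markers for the first file. The file name ("profiledump.go"), the direct use of FileName as an on-disk path, and the standalone main are illustrative assumptions, not how cmd/cover itself wires these helpers together.

// profiledump.go (hypothetical example): assumes a copy of profile.go sits in
// the same package so that ParseProfiles, Profile, and Boundary are visible.
package main

import (
	"fmt"
	"log"
	"os"
)

func dumpProfiles(profilePath string) error {
	// Parse every per-file section of the coverage profile.
	profiles, err := ParseProfiles(profilePath)
	if err != nil {
		return err
	}
	for _, p := range profiles {
		fmt.Printf("%s (mode=%s): %d blocks\n", p.FileName, p.Mode, len(p.Blocks))
	}
	if len(profiles) == 0 {
		return nil
	}
	// Boundaries needs the raw source of the profiled file. Reading FileName
	// directly is a simplification: real profiles record package-relative
	// names that the cover tool resolves before calling Boundaries.
	src, err := os.ReadFile(profiles[0].FileName)
	if err != nil {
		return err
	}
	for _, b := range profiles[0].Boundaries(src) {
		fmt.Printf("offset=%d start=%v count=%d norm=%.2f\n", b.Offset, b.Start, b.Count, b.Norm)
	}
	return nil
}

func main() {
	if err := dumpProfiles("cover.out"); err != nil {
		log.Fatal(err)
	}
}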