summaryrefslogtreecommitdiffstats
path: root/src/cmd/vendor/github.com
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-16 19:25:22 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-16 19:25:22 +0000
commitf6ad4dcef54c5ce997a4bad5a6d86de229015700 (patch)
tree7cfa4e31ace5c2bd95c72b154d15af494b2bcbef /src/cmd/vendor/github.com
parentInitial commit. (diff)
downloadgolang-1.22-f6ad4dcef54c5ce997a4bad5a6d86de229015700.tar.xz
golang-1.22-f6ad4dcef54c5ce997a4bad5a6d86de229015700.zip
Adding upstream version 1.22.1.upstream/1.22.1
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'src/cmd/vendor/github.com')
-rw-r--r--src/cmd/vendor/github.com/google/pprof/AUTHORS7
-rw-r--r--src/cmd/vendor/github.com/google/pprof/CONTRIBUTORS16
-rw-r--r--src/cmd/vendor/github.com/google/pprof/LICENSE202
-rw-r--r--src/cmd/vendor/github.com/google/pprof/driver/driver.go298
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/binutils/addr2liner.go238
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/binutils/addr2liner_llvm.go181
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/binutils/addr2liner_nm.go144
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/binutils/binutils.go738
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/binutils/disasm.go180
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/driver/cli.go360
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/driver/commands.go459
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/driver/config.go371
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/driver/driver.go386
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/driver/driver_focus.go219
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/driver/fetch.go616
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/driver/flags.go71
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/driver/flamegraph.go106
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/driver/html/common.css273
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/driver/html/common.js714
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/driver/html/flamegraph.html103
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/driver/html/graph.html16
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/driver/html/header.html114
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/driver/html/plaintext.html18
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/driver/html/source.html18
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/driver/html/stacks.css78
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/driver/html/stacks.html32
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/driver/html/stacks.js600
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/driver/html/top.html114
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/driver/interactive.go419
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/driver/options.go100
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/driver/settings.go158
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/driver/stacks.go58
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/driver/svg.go80
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/driver/tagroot.go133
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/driver/tempfile.go60
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/driver/webhtml.go71
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/driver/webui.go473
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/elfexec/elfexec.go383
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/graph/dotgraph.go494
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/graph/graph.go1170
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/measurement/measurement.go293
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/plugin/plugin.go216
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/report/package.go17
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/report/report.go1334
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/report/shortnames.go39
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/report/source.go1114
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/report/source_html.go75
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/report/stacks.go194
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/report/synth.go39
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/symbolizer/symbolizer.go379
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/symbolz/symbolz.go200
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/transport/transport.go131
-rw-r--r--src/cmd/vendor/github.com/google/pprof/profile/encode.go588
-rw-r--r--src/cmd/vendor/github.com/google/pprof/profile/filter.go274
-rw-r--r--src/cmd/vendor/github.com/google/pprof/profile/index.go64
-rw-r--r--src/cmd/vendor/github.com/google/pprof/profile/legacy_java_profile.go315
-rw-r--r--src/cmd/vendor/github.com/google/pprof/profile/legacy_profile.go1228
-rw-r--r--src/cmd/vendor/github.com/google/pprof/profile/merge.go667
-rw-r--r--src/cmd/vendor/github.com/google/pprof/profile/profile.go856
-rw-r--r--src/cmd/vendor/github.com/google/pprof/profile/proto.go367
-rw-r--r--src/cmd/vendor/github.com/google/pprof/profile/prune.go194
-rw-r--r--src/cmd/vendor/github.com/google/pprof/third_party/d3flamegraph/D3_FLAME_GRAPH_LICENSE201
-rw-r--r--src/cmd/vendor/github.com/google/pprof/third_party/d3flamegraph/D3_LICENSE13
-rw-r--r--src/cmd/vendor/github.com/google/pprof/third_party/d3flamegraph/README.md33
-rw-r--r--src/cmd/vendor/github.com/google/pprof/third_party/d3flamegraph/d3_flame_graph.go65
-rw-r--r--src/cmd/vendor/github.com/google/pprof/third_party/d3flamegraph/index.js13
-rw-r--r--src/cmd/vendor/github.com/google/pprof/third_party/d3flamegraph/package-lock.json1106
-rw-r--r--src/cmd/vendor/github.com/google/pprof/third_party/d3flamegraph/package.json17
-rwxr-xr-xsrc/cmd/vendor/github.com/google/pprof/third_party/d3flamegraph/update.sh62
-rw-r--r--src/cmd/vendor/github.com/google/pprof/third_party/d3flamegraph/webpack.config.js13
-rw-r--r--src/cmd/vendor/github.com/google/pprof/third_party/svgpan/LICENSE27
-rw-r--r--src/cmd/vendor/github.com/google/pprof/third_party/svgpan/svgpan.go297
-rw-r--r--src/cmd/vendor/github.com/ianlancetaylor/demangle/.gitignore13
-rw-r--r--src/cmd/vendor/github.com/ianlancetaylor/demangle/LICENSE27
-rw-r--r--src/cmd/vendor/github.com/ianlancetaylor/demangle/README.md3
-rw-r--r--src/cmd/vendor/github.com/ianlancetaylor/demangle/SECURITY.md13
-rw-r--r--src/cmd/vendor/github.com/ianlancetaylor/demangle/ast.go4142
-rw-r--r--src/cmd/vendor/github.com/ianlancetaylor/demangle/demangle.go3362
-rw-r--r--src/cmd/vendor/github.com/ianlancetaylor/demangle/rust.go1165
79 files changed, 29427 insertions, 0 deletions
diff --git a/src/cmd/vendor/github.com/google/pprof/AUTHORS b/src/cmd/vendor/github.com/google/pprof/AUTHORS
new file mode 100644
index 0000000..fd736cb
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/AUTHORS
@@ -0,0 +1,7 @@
+# This is the official list of pprof authors for copyright purposes.
+# This file is distinct from the CONTRIBUTORS files.
+# See the latter for an explanation.
+# Names should be added to this file as:
+# Name or Organization <email address>
+# The email address is not required for organizations.
+Google Inc. \ No newline at end of file
diff --git a/src/cmd/vendor/github.com/google/pprof/CONTRIBUTORS b/src/cmd/vendor/github.com/google/pprof/CONTRIBUTORS
new file mode 100644
index 0000000..8c8c37d
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/CONTRIBUTORS
@@ -0,0 +1,16 @@
+# People who have agreed to one of the CLAs and can contribute patches.
+# The AUTHORS file lists the copyright holders; this file
+# lists people. For example, Google employees are listed here
+# but not in AUTHORS, because Google holds the copyright.
+#
+# https://developers.google.com/open-source/cla/individual
+# https://developers.google.com/open-source/cla/corporate
+#
+# Names should be added to this file as:
+# Name <email address>
+Raul Silvera <rsilvera@google.com>
+Tipp Moseley <tipp@google.com>
+Hyoun Kyu Cho <netforce@google.com>
+Martin Spier <spiermar@gmail.com>
+Taco de Wolff <tacodewolff@gmail.com>
+Andrew Hunter <andrewhhunter@gmail.com>
diff --git a/src/cmd/vendor/github.com/google/pprof/LICENSE b/src/cmd/vendor/github.com/google/pprof/LICENSE
new file mode 100644
index 0000000..d645695
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/LICENSE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/src/cmd/vendor/github.com/google/pprof/driver/driver.go b/src/cmd/vendor/github.com/google/pprof/driver/driver.go
new file mode 100644
index 0000000..5a8222f
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/driver/driver.go
@@ -0,0 +1,298 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package driver provides an external entry point to the pprof driver.
+package driver
+
+import (
+ "io"
+ "net/http"
+ "regexp"
+ "time"
+
+ internaldriver "github.com/google/pprof/internal/driver"
+ "github.com/google/pprof/internal/plugin"
+ "github.com/google/pprof/profile"
+)
+
+// PProf acquires a profile, and symbolizes it using a profile
+// manager. Then it generates a report formatted according to the
+// options selected through the flags package.
+func PProf(o *Options) error {
+ return internaldriver.PProf(o.internalOptions())
+}
+
+func (o *Options) internalOptions() *plugin.Options {
+ var obj plugin.ObjTool
+ if o.Obj != nil {
+ obj = &internalObjTool{o.Obj}
+ }
+ var sym plugin.Symbolizer
+ if o.Sym != nil {
+ sym = &internalSymbolizer{o.Sym}
+ }
+ var httpServer func(args *plugin.HTTPServerArgs) error
+ if o.HTTPServer != nil {
+ httpServer = func(args *plugin.HTTPServerArgs) error {
+ return o.HTTPServer(((*HTTPServerArgs)(args)))
+ }
+ }
+ return &plugin.Options{
+ Writer: o.Writer,
+ Flagset: o.Flagset,
+ Fetch: o.Fetch,
+ Sym: sym,
+ Obj: obj,
+ UI: o.UI,
+ HTTPServer: httpServer,
+ HTTPTransport: o.HTTPTransport,
+ }
+}
+
+// HTTPServerArgs contains arguments needed by an HTTP server that
+// is exporting a pprof web interface.
+type HTTPServerArgs plugin.HTTPServerArgs
+
+// Options groups all the optional plugins into pprof.
+type Options struct {
+ Writer Writer
+ Flagset FlagSet
+ Fetch Fetcher
+ Sym Symbolizer
+ Obj ObjTool
+ UI UI
+ HTTPServer func(*HTTPServerArgs) error
+ HTTPTransport http.RoundTripper
+}
+
+// Writer provides a mechanism to write data under a certain name,
+// typically a filename.
+type Writer interface {
+ Open(name string) (io.WriteCloser, error)
+}
+
+// A FlagSet creates and parses command-line flags.
+// It is similar to the standard flag.FlagSet.
+type FlagSet interface {
+ // Bool, Int, Float64, and String define new flags,
+ // like the functions of the same name in package flag.
+ Bool(name string, def bool, usage string) *bool
+ Int(name string, def int, usage string) *int
+ Float64(name string, def float64, usage string) *float64
+ String(name string, def string, usage string) *string
+
+ // StringList is similar to String but allows multiple values for a
+ // single flag
+ StringList(name string, def string, usage string) *[]*string
+
+ // ExtraUsage returns any additional text that should be printed after the
+ // standard usage message. The extra usage message returned includes all text
+ // added with AddExtraUsage().
+ // The typical use of ExtraUsage is to show any custom flags defined by the
+ // specific pprof plugins being used.
+ ExtraUsage() string
+
+ // AddExtraUsage appends additional text to the end of the extra usage message.
+ AddExtraUsage(eu string)
+
+ // Parse initializes the flags with their values for this run
+ // and returns the non-flag command line arguments.
+ // If an unknown flag is encountered or there are no arguments,
+ // Parse should call usage and return nil.
+ Parse(usage func()) []string
+}
+
+// A Fetcher reads and returns the profile named by src, using
+// the specified duration and timeout. It returns the fetched
+// profile and a string indicating a URL from where the profile
+// was fetched, which may be different than src.
+type Fetcher interface {
+ Fetch(src string, duration, timeout time.Duration) (*profile.Profile, string, error)
+}
+
+// A Symbolizer introduces symbol information into a profile.
+type Symbolizer interface {
+ Symbolize(mode string, srcs MappingSources, prof *profile.Profile) error
+}
+
+// MappingSources map each profile.Mapping to the source of the profile.
+// The key is either Mapping.File or Mapping.BuildId.
+type MappingSources map[string][]struct {
+ Source string // URL of the source the mapping was collected from
+ Start uint64 // delta applied to addresses from this source (to represent Merge adjustments)
+}
+
+// An ObjTool inspects shared libraries and executable files.
+type ObjTool interface {
+ // Open opens the named object file. If the object is a shared
+ // library, start/limit/offset are the addresses where it is mapped
+ // into memory in the address space being inspected. If the object
+ // is a linux kernel, relocationSymbol is the name of the symbol
+ // corresponding to the start address.
+ Open(file string, start, limit, offset uint64, relocationSymbol string) (ObjFile, error)
+
+ // Disasm disassembles the named object file, starting at
+ // the start address and stopping at (before) the end address.
+ Disasm(file string, start, end uint64, intelSyntax bool) ([]Inst, error)
+}
+
+// An Inst is a single instruction in an assembly listing.
+type Inst struct {
+ Addr uint64 // virtual address of instruction
+ Text string // instruction text
+ Function string // function name
+ File string // source file
+ Line int // source line
+}
+
+// An ObjFile is a single object file: a shared library or executable.
+type ObjFile interface {
+ // Name returns the underlying file name, if available.
+ Name() string
+
+ // ObjAddr returns the objdump address corresponding to a runtime address.
+ ObjAddr(addr uint64) (uint64, error)
+
+ // BuildID returns the GNU build ID of the file, or an empty string.
+ BuildID() string
+
+ // SourceLine reports the source line information for a given
+ // address in the file. Due to inlining, the source line information
+ // is in general a list of positions representing a call stack,
+ // with the leaf function first.
+ SourceLine(addr uint64) ([]Frame, error)
+
+ // Symbols returns a list of symbols in the object file.
+ // If r is not nil, Symbols restricts the list to symbols
+ // with names matching the regular expression.
+ // If addr is not zero, Symbols restricts the list to symbols
+ // containing that address.
+ Symbols(r *regexp.Regexp, addr uint64) ([]*Sym, error)
+
+ // Close closes the file, releasing associated resources.
+ Close() error
+}
+
+// A Frame describes a single line in a source file.
+type Frame struct {
+ Func string // name of function
+ File string // source file name
+ Line int // line in file
+}
+
+// A Sym describes a single symbol in an object file.
+type Sym struct {
+ Name []string // names of symbol (many if symbol was dedup'ed)
+ File string // object file containing symbol
+ Start uint64 // start virtual address
+ End uint64 // virtual address of last byte in sym (Start+size-1)
+}
+
+// A UI manages user interactions.
+type UI interface {
+ // Read returns a line of text (a command) read from the user.
+ // prompt is printed before reading the command.
+ ReadLine(prompt string) (string, error)
+
+ // Print shows a message to the user.
+ // It formats the text as fmt.Print would and adds a final \n if not already present.
+ // For line-based UI, Print writes to standard error.
+ // (Standard output is reserved for report data.)
+ Print(...interface{})
+
+ // PrintErr shows an error message to the user.
+ // It formats the text as fmt.Print would and adds a final \n if not already present.
+ // For line-based UI, PrintErr writes to standard error.
+ PrintErr(...interface{})
+
+ // IsTerminal returns whether the UI is known to be tied to an
+ // interactive terminal (as opposed to being redirected to a file).
+ IsTerminal() bool
+
+ // WantBrowser indicates whether browser should be opened with the -http option.
+ WantBrowser() bool
+
+ // SetAutoComplete instructs the UI to call complete(cmd) to obtain
+ // the auto-completion of cmd, if the UI supports auto-completion at all.
+ SetAutoComplete(complete func(string) string)
+}
+
+// internalObjTool is a wrapper to map from the pprof external
+// interface to the internal interface.
+type internalObjTool struct {
+ ObjTool
+}
+
+func (o *internalObjTool) Open(file string, start, limit, offset uint64, relocationSymbol string) (plugin.ObjFile, error) {
+ f, err := o.ObjTool.Open(file, start, limit, offset, relocationSymbol)
+ if err != nil {
+ return nil, err
+ }
+ return &internalObjFile{f}, err
+}
+
+type internalObjFile struct {
+ ObjFile
+}
+
+func (f *internalObjFile) SourceLine(frame uint64) ([]plugin.Frame, error) {
+ frames, err := f.ObjFile.SourceLine(frame)
+ if err != nil {
+ return nil, err
+ }
+ var pluginFrames []plugin.Frame
+ for _, f := range frames {
+ pluginFrames = append(pluginFrames, plugin.Frame(f))
+ }
+ return pluginFrames, nil
+}
+
+func (f *internalObjFile) Symbols(r *regexp.Regexp, addr uint64) ([]*plugin.Sym, error) {
+ syms, err := f.ObjFile.Symbols(r, addr)
+ if err != nil {
+ return nil, err
+ }
+ var pluginSyms []*plugin.Sym
+ for _, s := range syms {
+ ps := plugin.Sym(*s)
+ pluginSyms = append(pluginSyms, &ps)
+ }
+ return pluginSyms, nil
+}
+
+func (o *internalObjTool) Disasm(file string, start, end uint64, intelSyntax bool) ([]plugin.Inst, error) {
+ insts, err := o.ObjTool.Disasm(file, start, end, intelSyntax)
+ if err != nil {
+ return nil, err
+ }
+ var pluginInst []plugin.Inst
+ for _, inst := range insts {
+ pluginInst = append(pluginInst, plugin.Inst(inst))
+ }
+ return pluginInst, nil
+}
+
+// internalSymbolizer is a wrapper to map from the pprof external
+// interface to the internal interface.
+type internalSymbolizer struct {
+ Symbolizer
+}
+
+func (s *internalSymbolizer) Symbolize(mode string, srcs plugin.MappingSources, prof *profile.Profile) error {
+ isrcs := MappingSources{}
+ for m, s := range srcs {
+ isrcs[m] = s
+ }
+ return s.Symbolizer.Symbolize(mode, isrcs, prof)
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/binutils/addr2liner.go b/src/cmd/vendor/github.com/google/pprof/internal/binutils/addr2liner.go
new file mode 100644
index 0000000..c2e45c6
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/binutils/addr2liner.go
@@ -0,0 +1,238 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package binutils
+
+import (
+ "bufio"
+ "fmt"
+ "io"
+ "os/exec"
+ "strconv"
+ "strings"
+ "sync"
+
+ "github.com/google/pprof/internal/plugin"
+)
+
const (
	// defaultAddr2line is the command name used when the caller does not
	// configure an explicit addr2line binary.
	defaultAddr2line = "addr2line"

	// addr2line may produce multiple lines of output. We
	// use this sentinel to identify the end of the output.
	sentinel = ^uint64(0)
)

// addr2Liner is a connection to an addr2line command for obtaining
// address and line number information from a binary.
type addr2Liner struct {
	mu   sync.Mutex       // serializes one request/response exchange on rw at a time
	rw   lineReaderWriter // line-oriented I/O to the addr2line subprocess
	base uint64           // address at which the binary was mapped; subtracted from queries

	// nm holds an addr2Liner using nm tool. Certain versions of addr2line
	// produce incomplete names due to
	// https://sourceware.org/bugzilla/show_bug.cgi?id=17541. As a workaround,
	// the names from nm are used when they look more complete. See addrInfo()
	// code below for the exact heuristic.
	nm *addr2LinerNM
}

// lineReaderWriter is an interface to abstract the I/O to an addr2line
// process. It writes a line of input to the job, and reads its output
// one line at a time.
type lineReaderWriter interface {
	write(string) error
	readLine() (string, error)
	close()
}
+
+type addr2LinerJob struct {
+ cmd *exec.Cmd
+ in io.WriteCloser
+ out *bufio.Reader
+}
+
+func (a *addr2LinerJob) write(s string) error {
+ _, err := fmt.Fprint(a.in, s+"\n")
+ return err
+}
+
+func (a *addr2LinerJob) readLine() (string, error) {
+ s, err := a.out.ReadString('\n')
+ if err != nil {
+ return "", err
+ }
+ return strings.TrimSpace(s), nil
+}
+
+// close releases any resources used by the addr2liner object.
+func (a *addr2LinerJob) close() {
+ a.in.Close()
+ a.cmd.Wait()
+}
+
// newAddr2Liner starts the given addr2liner command reporting
// information about the given executable file. If file is a shared
// library, base should be the address at which it was mapped in the
// program under consideration.
func newAddr2Liner(cmd, file string, base uint64) (*addr2Liner, error) {
	if cmd == "" {
		cmd = defaultAddr2line
	}

	// "-aif": echo addresses, expand inlined frames, and print function
	// names as well as file/line (see addr2line(1)); readFrame depends on
	// this exact output shape.
	j := &addr2LinerJob{
		cmd: exec.Command(cmd, "-aif", "-e", file),
	}

	var err error
	if j.in, err = j.cmd.StdinPipe(); err != nil {
		return nil, err
	}

	outPipe, err := j.cmd.StdoutPipe()
	if err != nil {
		// NOTE(review): the stdin pipe created above is not closed on this
		// error path (nor below if Start fails) — confirm this is acceptable.
		return nil, err
	}

	j.out = bufio.NewReader(outPipe)
	if err := j.cmd.Start(); err != nil {
		return nil, err
	}

	a := &addr2Liner{
		rw:   j,
		base: base,
	}

	return a, nil
}
+
// readFrame parses the addr2line output for a single address. It
// returns a populated plugin.Frame and whether it has reached the end of the
// data.
//
// Because addr2line runs with -aif (see newAddr2Liner), each response is a
// sequence of funcname/fileline line pairs, terminated by the echoed hex
// address of the next (sentinel) request.
func (d *addr2Liner) readFrame() (plugin.Frame, bool) {
	funcname, err := d.rw.readLine()
	if err != nil {
		return plugin.Frame{}, true
	}
	if strings.HasPrefix(funcname, "0x") {
		// If addr2line returns a hex address we can assume it is the
		// sentinel. Read and ignore next two lines of output from
		// addr2line.
		d.rw.readLine()
		d.rw.readLine()
		return plugin.Frame{}, true
	}

	fileline, err := d.rw.readLine()
	if err != nil {
		return plugin.Frame{}, true
	}

	linenumber := 0

	// addr2line prints "??" / "??:0" when it cannot resolve a name or
	// location; map those to empty strings.
	if funcname == "??" {
		funcname = ""
	}

	if fileline == "??:0" {
		fileline = ""
	} else {
		if i := strings.LastIndex(fileline, ":"); i >= 0 {
			// Remove discriminator, if present.
			if disc := strings.Index(fileline, " (discriminator"); disc > 0 {
				fileline = fileline[:disc]
			}
			// If we cannot parse a number after the last ":", keep it as
			// part of the filename.
			if line, err := strconv.Atoi(fileline[i+1:]); err == nil {
				linenumber = line
				fileline = fileline[:i]
			}
		}
	}

	return plugin.Frame{
		Func: funcname,
		File: fileline,
		Line: linenumber}, false
}
+
+func (d *addr2Liner) rawAddrInfo(addr uint64) ([]plugin.Frame, error) {
+ d.mu.Lock()
+ defer d.mu.Unlock()
+
+ if err := d.rw.write(fmt.Sprintf("%x", addr-d.base)); err != nil {
+ return nil, err
+ }
+
+ if err := d.rw.write(fmt.Sprintf("%x", sentinel)); err != nil {
+ return nil, err
+ }
+
+ resp, err := d.rw.readLine()
+ if err != nil {
+ return nil, err
+ }
+
+ if !strings.HasPrefix(resp, "0x") {
+ return nil, fmt.Errorf("unexpected addr2line output: %s", resp)
+ }
+
+ var stack []plugin.Frame
+ for {
+ frame, end := d.readFrame()
+ if end {
+ break
+ }
+
+ if frame != (plugin.Frame{}) {
+ stack = append(stack, frame)
+ }
+ }
+ return stack, err
+}
+
// addrInfo returns the stack frame information for a specific program
// address. It returns nil if the address could not be identified.
func (d *addr2Liner) addrInfo(addr uint64) ([]plugin.Frame, error) {
	stack, err := d.rawAddrInfo(addr)
	if err != nil {
		return nil, err
	}

	// Certain versions of addr2line produce incomplete names due to
	// https://sourceware.org/bugzilla/show_bug.cgi?id=17541. Attempt to replace
	// the name with a better one from nm.
	if len(stack) > 0 && d.nm != nil {
		nm, err := d.nm.addrInfo(addr)
		if err == nil && len(nm) > 0 {
			// Last entry in frame list should match since it is non-inlined. As a
			// simple heuristic, we only switch to the nm-based name if it is longer
			// by 2 or more characters. We consider nm names that are longer by 1
			// character insignificant to avoid replacing foo with _foo on MacOS (for
			// unknown reasons addr2line produces the former and nm produces the
			// latter on MacOS even though both tools are asked to produce mangled
			// names).
			nmName := nm[len(nm)-1].Func
			a2lName := stack[len(stack)-1].Func
			if len(nmName) > len(a2lName)+1 {
				stack[len(stack)-1].Func = nmName
			}
		}
	}

	return stack, nil
}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/binutils/addr2liner_llvm.go b/src/cmd/vendor/github.com/google/pprof/internal/binutils/addr2liner_llvm.go
new file mode 100644
index 0000000..491422f
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/binutils/addr2liner_llvm.go
@@ -0,0 +1,181 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package binutils
+
+import (
+ "bufio"
+ "fmt"
+ "io"
+ "os/exec"
+ "strconv"
+ "strings"
+ "sync"
+
+ "github.com/google/pprof/internal/plugin"
+)
+
const (
	// defaultLLVMSymbolizer is the command name used when the caller does
	// not configure an explicit llvm-symbolizer binary.
	defaultLLVMSymbolizer = "llvm-symbolizer"
)

// llvmSymbolizer is a connection to an llvm-symbolizer command for
// obtaining address and line number information from a binary.
type llvmSymbolizer struct {
	sync.Mutex        // serializes one request/response exchange on rw at a time
	filename   string // binary being symbolized; sent as part of every request
	rw         lineReaderWriter
	base       uint64 // address at which the binary was mapped; subtracted from queries
}

// llvmSymbolizerJob wraps a running llvm-symbolizer subprocess, exposing
// its stdin and stdout as a lineReaderWriter.
type llvmSymbolizerJob struct {
	cmd *exec.Cmd
	in  io.WriteCloser
	out *bufio.Reader
	// llvm-symbolizer requires the symbol type, CODE or DATA, for symbolization.
	symType string
}
+
+func (a *llvmSymbolizerJob) write(s string) error {
+ _, err := fmt.Fprintln(a.in, a.symType, s)
+ return err
+}
+
+func (a *llvmSymbolizerJob) readLine() (string, error) {
+ s, err := a.out.ReadString('\n')
+ if err != nil {
+ return "", err
+ }
+ return strings.TrimSpace(s), nil
+}
+
+// close releases any resources used by the llvmSymbolizer object.
+func (a *llvmSymbolizerJob) close() {
+ a.in.Close()
+ a.cmd.Wait()
+}
+
// newLLVMSymbolizer starts the given llvmSymbolizer command reporting
// information about the given executable file. If file is a shared
// library, base should be the address at which it was mapped in the
// program under consideration.
func newLLVMSymbolizer(cmd, file string, base uint64, isData bool) (*llvmSymbolizer, error) {
	if cmd == "" {
		cmd = defaultLLVMSymbolizer
	}

	// --inlining makes llvm-symbolizer report all inlined frames;
	// -demangle=false keeps mangled names (matching the addr2line path).
	j := &llvmSymbolizerJob{
		cmd:     exec.Command(cmd, "--inlining", "-demangle=false"),
		symType: "CODE",
	}
	if isData {
		j.symType = "DATA"
	}

	var err error
	if j.in, err = j.cmd.StdinPipe(); err != nil {
		return nil, err
	}

	outPipe, err := j.cmd.StdoutPipe()
	if err != nil {
		// NOTE(review): the stdin pipe created above is not closed on this
		// error path (nor below if Start fails) — confirm this is acceptable.
		return nil, err
	}

	j.out = bufio.NewReader(outPipe)
	if err := j.cmd.Start(); err != nil {
		return nil, err
	}

	a := &llvmSymbolizer{
		filename: file,
		rw:       j,
		base:     base,
	}

	return a, nil
}
+
// readFrame parses the llvm-symbolizer output for a single address. It
// returns a populated plugin.Frame and whether it has reached the end of the
// data.
func (d *llvmSymbolizer) readFrame() (plugin.Frame, bool) {
	funcname, err := d.rw.readLine()
	if err != nil {
		return plugin.Frame{}, true
	}

	switch funcname {
	case "":
		// An empty line terminates the output for one address.
		return plugin.Frame{}, true
	case "??":
		// llvm-symbolizer prints "??" when it cannot resolve a name.
		funcname = ""
	}

	fileline, err := d.rw.readLine()
	if err != nil {
		return plugin.Frame{Func: funcname}, true
	}

	linenumber := 0
	// The llvm-symbolizer outputs the <file_name>:<line_number>:<column_number>.
	// When it cannot identify the source code location, it outputs "??:0:0".
	// Older versions output just the filename and line number, so we check for
	// both conditions here.
	if fileline == "??:0" || fileline == "??:0:0" {
		fileline = ""
	} else {
		switch split := strings.Split(fileline, ":"); len(split) {
		case 1:
			// filename only
			fileline = split[0]
		case 2, 3:
			// filename:line, or filename:line:column — only the line number
			// is parsed; the trailing component is discarded.
			fileline = split[0]
			if line, err := strconv.Atoi(split[1]); err == nil {
				linenumber = line
			}
		default:
			// Unrecognized, ignore.
		}
	}

	return plugin.Frame{Func: funcname, File: fileline, Line: linenumber}, false
}
+
+// addrInfo returns the stack frame information for a specific program
+// address. It returns nil if the address could not be identified.
+func (d *llvmSymbolizer) addrInfo(addr uint64) ([]plugin.Frame, error) {
+ d.Lock()
+ defer d.Unlock()
+
+ if err := d.rw.write(fmt.Sprintf("%s 0x%x", d.filename, addr-d.base)); err != nil {
+ return nil, err
+ }
+
+ var stack []plugin.Frame
+ for {
+ frame, end := d.readFrame()
+ if end {
+ break
+ }
+
+ if frame != (plugin.Frame{}) {
+ stack = append(stack, frame)
+ }
+ }
+
+ return stack, nil
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/binutils/addr2liner_nm.go b/src/cmd/vendor/github.com/google/pprof/internal/binutils/addr2liner_nm.go
new file mode 100644
index 0000000..8e0ccc7
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/binutils/addr2liner_nm.go
@@ -0,0 +1,144 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package binutils
+
+import (
+ "bufio"
+ "bytes"
+ "io"
+ "os/exec"
+ "strconv"
+ "strings"
+
+ "github.com/google/pprof/internal/plugin"
+)
+
const (
	// defaultNM is the command name used when the caller does not
	// configure an explicit nm binary.
	defaultNM = "nm"
)

// addr2LinerNM is a connection to an nm command for obtaining symbol
// information from a binary.
type addr2LinerNM struct {
	m []symbolInfo // Sorted list of symbol addresses from binary.
}
+
// symbolInfo is one entry parsed from nm output: a symbol name together
// with its (base-adjusted) address, size and nm type letter.
type symbolInfo struct {
	address uint64
	size    uint64
	name    string
	symType string
}

// isData reports whether the symbol has a known data object symbol type.
//
// The symbol types are taken from https://linux.die.net/man/1/nm:
// a lowercase letter means a local symbol, uppercase a global one.
//   - b/B: uninitialized data section, e.g. .bss;
//   - d/D: initialized data section;
//   - r/R: read-only data section;
//   - v/V: weak object;
//   - W:   weak symbol not specifically tagged as a weak object symbol;
//     experiments with some binaries showed these to be mostly data objects.
func (s *symbolInfo) isData() bool {
	const dataSymTypes = "bBdDrRvVW"
	return strings.ContainsAny(s.symType, dataSymTypes)
}
+
// newAddr2LinerNM starts the given nm command reporting information about the
// given executable file. If file is a shared library, base should be the
// address at which it was mapped in the program under consideration.
func newAddr2LinerNM(cmd, file string, base uint64) (*addr2LinerNM, error) {
	if cmd == "" {
		cmd = defaultNM
	}
	var b bytes.Buffer
	// --numeric-sort keeps output ordered by address (required by the
	// binary search in addrInfo); --format=posix yields the
	// "name type addr size" fields that parseAddr2LinerNM expects.
	c := exec.Command(cmd, "--numeric-sort", "--print-size", "--format=posix", file)
	c.Stdout = &b
	if err := c.Run(); err != nil {
		return nil, err
	}
	return parseAddr2LinerNM(base, &b)
}
+
+func parseAddr2LinerNM(base uint64, nm io.Reader) (*addr2LinerNM, error) {
+ a := &addr2LinerNM{
+ m: []symbolInfo{},
+ }
+
+ // Parse nm output and populate symbol map.
+ // Skip lines we fail to parse.
+ buf := bufio.NewReader(nm)
+ for {
+ line, err := buf.ReadString('\n')
+ if line == "" && err != nil {
+ if err == io.EOF {
+ break
+ }
+ return nil, err
+ }
+ line = strings.TrimSpace(line)
+ fields := strings.Split(line, " ")
+ if len(fields) != 4 {
+ continue
+ }
+ address, err := strconv.ParseUint(fields[2], 16, 64)
+ if err != nil {
+ continue
+ }
+ size, err := strconv.ParseUint(fields[3], 16, 64)
+ if err != nil {
+ continue
+ }
+ a.m = append(a.m, symbolInfo{
+ address: address + base,
+ size: size,
+ name: fields[0],
+ symType: fields[1],
+ })
+ }
+
+ return a, nil
+}
+
// addrInfo returns the stack frame information for a specific program
// address. It returns nil if the address could not be identified.
func (a *addr2LinerNM) addrInfo(addr uint64) ([]plugin.Frame, error) {
	// Reject addresses outside the range covered by the symbol table.
	if len(a.m) == 0 || addr < a.m[0].address || addr >= (a.m[len(a.m)-1].address+a.m[len(a.m)-1].size) {
		return nil, nil
	}

	// Binary search. Search until low, high are separated by 1.
	low, high := 0, len(a.m)
	for low+1 < high {
		mid := (low + high) / 2
		v := a.m[mid].address
		if addr == v {
			low = mid
			break
		} else if addr > v {
			low = mid
		} else {
			high = mid
		}
	}

	// Address is between a.m[low] and a.m[high]. Pick low, as it represents
	// [low, high). For data symbols, we use a strict check that the address is in
	// the [start, start + size) range of a.m[low].
	if a.m[low].isData() && addr >= (a.m[low].address+a.m[low].size) {
		return nil, nil
	}
	return []plugin.Frame{{Func: a.m[low].name}}, nil
}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/binutils/binutils.go b/src/cmd/vendor/github.com/google/pprof/internal/binutils/binutils.go
new file mode 100644
index 0000000..efa9167
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/binutils/binutils.go
@@ -0,0 +1,738 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package binutils provides access to the GNU binutils.
+package binutils
+
+import (
+ "debug/elf"
+ "debug/macho"
+ "debug/pe"
+ "encoding/binary"
+ "errors"
+ "fmt"
+ "io"
+ "os"
+ "os/exec"
+ "path/filepath"
+ "regexp"
+ "runtime"
+ "strconv"
+ "strings"
+ "sync"
+
+ "github.com/google/pprof/internal/elfexec"
+ "github.com/google/pprof/internal/plugin"
+)
+
// A Binutils implements plugin.ObjTool by invoking the GNU binutils.
type Binutils struct {
	mu  sync.Mutex // guards rep
	rep *binrep    // current tool configuration; replaced wholesale by update()
}

var (
	// objdumpLLVMVerRE extracts the version triple (or "trunk") from the
	// output of "objdump --version" for LLVM objdump binaries.
	objdumpLLVMVerRE = regexp.MustCompile(`LLVM version (?:(\d*)\.(\d*)\.(\d*)|.*(trunk).*)`)

	// Defined for testing
	elfOpen = elf.Open
)

// binrep is an immutable representation for Binutils. It is atomically
// replaced on every mutation to provide thread-safe access.
type binrep struct {
	// Commands to invoke.
	llvmSymbolizer      string
	llvmSymbolizerFound bool
	addr2line           string
	addr2lineFound      bool
	nm                  string
	nmFound             bool
	objdump             string
	objdumpFound        bool
	isLLVMObjdump       bool

	// if fast, perform symbolization using nm (symbol names only),
	// instead of file-line detail from the slower addr2line.
	fast bool
}
+
+// get returns the current representation for bu, initializing it if necessary.
+func (bu *Binutils) get() *binrep {
+ bu.mu.Lock()
+ r := bu.rep
+ if r == nil {
+ r = &binrep{}
+ initTools(r, "")
+ bu.rep = r
+ }
+ bu.mu.Unlock()
+ return r
+}
+
+// update modifies the rep for bu via the supplied function.
+func (bu *Binutils) update(fn func(r *binrep)) {
+ r := &binrep{}
+ bu.mu.Lock()
+ defer bu.mu.Unlock()
+ if bu.rep == nil {
+ initTools(r, "")
+ } else {
+ *r = *bu.rep
+ }
+ fn(r)
+ bu.rep = r
+}
+
+// String returns string representation of the binutils state for debug logging.
+func (bu *Binutils) String() string {
+ r := bu.get()
+ var llvmSymbolizer, addr2line, nm, objdump string
+ if r.llvmSymbolizerFound {
+ llvmSymbolizer = r.llvmSymbolizer
+ }
+ if r.addr2lineFound {
+ addr2line = r.addr2line
+ }
+ if r.nmFound {
+ nm = r.nm
+ }
+ if r.objdumpFound {
+ objdump = r.objdump
+ }
+ return fmt.Sprintf("llvm-symbolizer=%q addr2line=%q nm=%q objdump=%q fast=%t",
+ llvmSymbolizer, addr2line, nm, objdump, r.fast)
+}
+
// SetFastSymbolization sets a toggle that makes binutils use fast
// symbolization (using nm), which is much faster than addr2line but
// provides only symbol name information (no file/line).
func (bu *Binutils) SetFastSymbolization(fast bool) {
	bu.update(func(r *binrep) { r.fast = fast })
}

// SetTools processes the contents of the tools option. It
// expects a set of entries separated by commas; each entry is a pair
// of the form t:path, where path will be used to look only for the
// tool named t. If t is not specified, the path is searched for all
// tools.
func (bu *Binutils) SetTools(config string) {
	bu.update(func(r *binrep) { initTools(r, config) })
}
+
// initTools fills in the tool paths of b, searching the directories given
// in config (see SetTools for the format; "" uses only the system PATH).
func initTools(b *binrep, config string) {
	// paths collect paths per tool; Key "" contains the default.
	paths := make(map[string][]string)
	for _, t := range strings.Split(config, ",") {
		name, path := "", t
		if ct := strings.SplitN(t, ":", 2); len(ct) == 2 {
			name, path = ct[0], ct[1]
		}
		paths[name] = append(paths[name], path)
	}

	defaultPath := paths[""]
	b.llvmSymbolizer, b.llvmSymbolizerFound = chooseExe([]string{"llvm-symbolizer"}, []string{}, append(paths["llvm-symbolizer"], defaultPath...))
	b.addr2line, b.addr2lineFound = chooseExe([]string{"addr2line"}, []string{"gaddr2line"}, append(paths["addr2line"], defaultPath...))
	// The "-n" option is supported by LLVM since 2011. The output of llvm-nm
	// and GNU nm with "-n" option is interchangeable for our purposes, so we do
	// not need to differentiate them.
	b.nm, b.nmFound = chooseExe([]string{"llvm-nm", "nm"}, []string{"gnm"}, append(paths["nm"], defaultPath...))
	b.objdump, b.objdumpFound, b.isLLVMObjdump = findObjdump(append(paths["objdump"], defaultPath...))
}
+
+// findObjdump finds and returns path to preferred objdump binary.
+// Order of preference is: llvm-objdump, objdump.
+// On MacOS only, also looks for gobjdump with least preference.
+// Accepts a list of paths and returns:
+// a string with path to the preferred objdump binary if found,
+// or an empty string if not found;
+// a boolean if any acceptable objdump was found;
+// a boolean indicating if it is an LLVM objdump.
+func findObjdump(paths []string) (string, bool, bool) {
+ objdumpNames := []string{"llvm-objdump", "objdump"}
+ if runtime.GOOS == "darwin" {
+ objdumpNames = append(objdumpNames, "gobjdump")
+ }
+
+ for _, objdumpName := range objdumpNames {
+ if objdump, objdumpFound := findExe(objdumpName, paths); objdumpFound {
+ cmdOut, err := exec.Command(objdump, "--version").Output()
+ if err != nil {
+ continue
+ }
+ if isLLVMObjdump(string(cmdOut)) {
+ return objdump, true, true
+ }
+ if isBuObjdump(string(cmdOut)) {
+ return objdump, true, false
+ }
+ }
+ }
+ return "", false, false
+}
+
+// chooseExe finds and returns path to preferred binary. names is a list of
+// names to search on both Linux and OSX. osxNames is a list of names specific
+// to OSX. names always has a higher priority than osxNames. The order of
+// the name within each list decides its priority (e.g. the first name has a
+// higher priority than the second name in the list).
+//
+// It returns a string with path to the binary and a boolean indicating if any
+// acceptable binary was found.
+func chooseExe(names, osxNames []string, paths []string) (string, bool) {
+ if runtime.GOOS == "darwin" {
+ names = append(names, osxNames...)
+ }
+ for _, name := range names {
+ if binary, found := findExe(name, paths); found {
+ return binary, true
+ }
+ }
+ return "", false
+}
+
// isLLVMObjdump accepts the output of an "objdump --version" invocation,
// and returns a boolean indicating if the given binary is an LLVM
// objdump binary of an acceptable version.
func isLLVMObjdump(output string) bool {
	fields := objdumpLLVMVerRE.FindStringSubmatch(output)
	if len(fields) != 5 {
		return false
	}
	if fields[4] == "trunk" {
		return true
	}
	verMajor, err := strconv.Atoi(fields[1])
	if err != nil {
		return false
	}
	// Note: the minor version (fields[2]) is not used by the checks below.
	verPatch, err := strconv.Atoi(fields[3])
	if err != nil {
		return false
	}
	if runtime.GOOS == "linux" && verMajor >= 8 {
		// Ensure LLVM objdump is at least version 8.0 on Linux.
		// Some flags, like --demangle, and double dashes for options are
		// not supported by previous versions.
		return true
	}
	if runtime.GOOS == "darwin" {
		// Ensure LLVM objdump is at least version 10.0.1 on MacOS.
		return verMajor > 10 || (verMajor == 10 && verPatch >= 1)
	}
	// On all other platforms LLVM objdump is rejected.
	return false
}
+
// isBuObjdump accepts the output of an "objdump --version" invocation,
// and returns a boolean indicating if the given binary is a GNU
// binutils objdump binary. No version check is performed.
func isBuObjdump(output string) bool {
	return strings.Contains(output, "GNU objdump")
}

// findExe looks for an executable command named cmd in each of the given
// paths, returning the resolved path and true on the first hit.
// If it cannot find it, returns cmd and false.
func findExe(cmd string, paths []string) (string, bool) {
	for _, dir := range paths {
		candidate := filepath.Join(dir, cmd)
		resolved, err := exec.LookPath(candidate)
		if err == nil {
			return resolved, true
		}
	}
	return cmd, false
}
+
// Disasm returns the assembly instructions for the specified address range
// of a binary.
func (bu *Binutils) Disasm(file string, start, end uint64, intelSyntax bool) ([]plugin.Inst, error) {
	b := bu.get()
	if !b.objdumpFound {
		return nil, errors.New("cannot disasm: no objdump tool available")
	}
	args := []string{"--disassemble", "--demangle", "--no-show-raw-insn",
		"--line-numbers", fmt.Sprintf("--start-address=%#x", start),
		fmt.Sprintf("--stop-address=%#x", end)}

	if intelSyntax {
		// LLVM and GNU objdump spell the Intel-syntax option differently.
		if b.isLLVMObjdump {
			args = append(args, "--x86-asm-syntax=intel")
		} else {
			args = append(args, "-M", "intel")
		}
	}

	args = append(args, file)
	cmd := exec.Command(b.objdump, args...)
	out, err := cmd.Output()
	if err != nil {
		return nil, fmt.Errorf("%v: %v", cmd.Args, err)
	}

	return disassemble(out)
}
+
// Open satisfies the plugin.ObjTool interface. It detects the file format
// from its magic number and dispatches to the matching ELF/Mach-O/PE opener.
func (bu *Binutils) Open(name string, start, limit, offset uint64, relocationSymbol string) (plugin.ObjFile, error) {
	b := bu.get()

	// Make sure file is a supported executable.
	// This uses magic numbers, mainly to provide better error messages but
	// it should also help speed.

	if _, err := os.Stat(name); err != nil {
		// For testing, do not require file name to exist.
		if strings.Contains(b.addr2line, "testdata/") {
			return &fileAddr2Line{file: file{b: b, name: name}}, nil
		}
		return nil, err
	}

	// Read the first 4 bytes of the file.

	f, err := os.Open(name)
	if err != nil {
		return nil, fmt.Errorf("error opening %s: %v", name, err)
	}
	defer f.Close()

	var header [4]byte
	if _, err = io.ReadFull(f, header[:]); err != nil {
		return nil, fmt.Errorf("error reading magic number from %s: %v", name, err)
	}

	elfMagic := string(header[:])

	// Match against supported file types.
	if elfMagic == elf.ELFMAG {
		f, err := b.openELF(name, start, limit, offset, relocationSymbol)
		if err != nil {
			return nil, fmt.Errorf("error reading ELF file %s: %v", name, err)
		}
		return f, nil
	}

	// Mach-O magic numbers can be big or little endian.
	machoMagicLittle := binary.LittleEndian.Uint32(header[:])
	machoMagicBig := binary.BigEndian.Uint32(header[:])

	if machoMagicLittle == macho.Magic32 || machoMagicLittle == macho.Magic64 ||
		machoMagicBig == macho.Magic32 || machoMagicBig == macho.Magic64 {
		f, err := b.openMachO(name, start, limit, offset)
		if err != nil {
			return nil, fmt.Errorf("error reading Mach-O file %s: %v", name, err)
		}
		return f, nil
	}
	if machoMagicLittle == macho.MagicFat || machoMagicBig == macho.MagicFat {
		f, err := b.openFatMachO(name, start, limit, offset)
		if err != nil {
			return nil, fmt.Errorf("error reading fat Mach-O file %s: %v", name, err)
		}
		return f, nil
	}

	// PE executables start with the two-byte "MZ" (DOS) magic.
	peMagic := string(header[:2])
	if peMagic == "MZ" {
		f, err := b.openPE(name, start, limit, offset)
		if err != nil {
			return nil, fmt.Errorf("error reading PE file %s: %v", name, err)
		}
		return f, nil
	}

	return nil, fmt.Errorf("unrecognized binary format: %s", name)
}
+
// openMachOCommon builds an ObjFile for an already-parsed Mach-O image,
// computing the mapping base from the __TEXT segment's load address.
func (b *binrep) openMachOCommon(name string, of *macho.File, start, limit, offset uint64) (plugin.ObjFile, error) {

	// Subtract the load address of the __TEXT section. Usually 0 for shared
	// libraries or 0x100000000 for executables. You can check this value by
	// running `objdump -private-headers <file>`.

	textSegment := of.Segment("__TEXT")
	if textSegment == nil {
		return nil, fmt.Errorf("could not identify base for %s: no __TEXT segment", name)
	}
	if textSegment.Addr > start {
		return nil, fmt.Errorf("could not identify base for %s: __TEXT segment address (0x%x) > mapping start address (0x%x)",
			name, textSegment.Addr, start)
	}

	base := start - textSegment.Addr

	// Use the nm-only path in fast mode or when no file/line capable tool
	// (addr2line or llvm-symbolizer) was found.
	if b.fast || (!b.addr2lineFound && !b.llvmSymbolizerFound) {
		return &fileNM{file: file{b: b, name: name, base: base}}, nil
	}
	return &fileAddr2Line{file: file{b: b, name: name, base: base}}, nil
}

// openFatMachO selects the slice of a fat (universal) Mach-O binary that
// matches the host architecture and opens it via openMachOCommon.
func (b *binrep) openFatMachO(name string, start, limit, offset uint64) (plugin.ObjFile, error) {
	of, err := macho.OpenFat(name)
	if err != nil {
		return nil, fmt.Errorf("error parsing %s: %v", name, err)
	}
	defer of.Close()

	if len(of.Arches) == 0 {
		return nil, fmt.Errorf("empty fat Mach-O file: %s", name)
	}

	var arch macho.Cpu
	// Use the host architecture.
	// TODO: This is not ideal because the host architecture may not be the one
	// that was profiled. E.g. an amd64 host can profile a 386 program.
	switch runtime.GOARCH {
	case "386":
		arch = macho.Cpu386
	case "amd64", "amd64p32":
		arch = macho.CpuAmd64
	case "arm", "armbe", "arm64", "arm64be":
		arch = macho.CpuArm
	case "ppc":
		arch = macho.CpuPpc
	case "ppc64", "ppc64le":
		arch = macho.CpuPpc64
	default:
		return nil, fmt.Errorf("unsupported host architecture for %s: %s", name, runtime.GOARCH)
	}
	for i := range of.Arches {
		if of.Arches[i].Cpu == arch {
			return b.openMachOCommon(name, of.Arches[i].File, start, limit, offset)
		}
	}
	return nil, fmt.Errorf("architecture not found in %s: %s", name, runtime.GOARCH)
}

// openMachO parses a thin Mach-O binary and opens it via openMachOCommon.
func (b *binrep) openMachO(name string, start, limit, offset uint64) (plugin.ObjFile, error) {
	of, err := macho.Open(name)
	if err != nil {
		return nil, fmt.Errorf("error parsing %s: %v", name, err)
	}
	defer of.Close()

	return b.openMachOCommon(name, of, start, limit, offset)
}
+
+func (b *binrep) openELF(name string, start, limit, offset uint64, relocationSymbol string) (plugin.ObjFile, error) {
+ ef, err := elfOpen(name)
+ if err != nil {
+ return nil, fmt.Errorf("error parsing %s: %v", name, err)
+ }
+ defer ef.Close()
+
+ buildID := ""
+ if f, err := os.Open(name); err == nil {
+ if id, err := elfexec.GetBuildID(f); err == nil {
+ buildID = fmt.Sprintf("%x", id)
+ }
+ }
+
+ var (
+ kernelOffset *uint64
+ pageAligned = func(addr uint64) bool { return addr%4096 == 0 }
+ )
+ if strings.Contains(name, "vmlinux") || !pageAligned(start) || !pageAligned(limit) || !pageAligned(offset) {
+ // Reading all Symbols is expensive, and we only rarely need it so
+ // we don't want to do it every time. But if _stext happens to be
+ // page-aligned but isn't the same as Vaddr, we would symbolize
+ // wrong. So if the name the addresses aren't page aligned, or if
+ // the name is "vmlinux" we read _stext. We can be wrong if: (1)
+ // someone passes a kernel path that doesn't contain "vmlinux" AND
+ // (2) _stext is page-aligned AND (3) _stext is not at Vaddr
+ symbols, err := ef.Symbols()
+ if err != nil && err != elf.ErrNoSymbols {
+ return nil, err
+ }
+
+ // The kernel relocation symbol (the mapping start address) can be either
+ // _text or _stext. When profiles are generated by `perf`, which one was used is
+ // distinguished by the mapping name for the kernel image:
+ // '[kernel.kallsyms]_text' or '[kernel.kallsyms]_stext', respectively. If we haven't
+ // been able to parse it from the mapping, we default to _stext.
+ if relocationSymbol == "" {
+ relocationSymbol = "_stext"
+ }
+ for _, s := range symbols {
+ if s.Name == relocationSymbol {
+ kernelOffset = &s.Value
+ break
+ }
+ }
+ }
+
+ // Check that we can compute a base for the binary. This may not be the
+ // correct base value, so we don't save it. We delay computing the actual base
+ // value until we have a sample address for this mapping, so that we can
+ // correctly identify the associated program segment that is needed to compute
+ // the base.
+ if _, err := elfexec.GetBase(&ef.FileHeader, elfexec.FindTextProgHeader(ef), kernelOffset, start, limit, offset); err != nil {
+ return nil, fmt.Errorf("could not identify base for %s: %v", name, err)
+ }
+
+ if b.fast || (!b.addr2lineFound && !b.llvmSymbolizerFound) {
+ return &fileNM{file: file{
+ b: b,
+ name: name,
+ buildID: buildID,
+ m: &elfMapping{start: start, limit: limit, offset: offset, kernelOffset: kernelOffset},
+ }}, nil
+ }
+ return &fileAddr2Line{file: file{
+ b: b,
+ name: name,
+ buildID: buildID,
+ m: &elfMapping{start: start, limit: limit, offset: offset, kernelOffset: kernelOffset},
+ }}, nil
+}
+
+func (b *binrep) openPE(name string, start, limit, offset uint64) (plugin.ObjFile, error) {
+ pf, err := pe.Open(name)
+ if err != nil {
+ return nil, fmt.Errorf("error parsing %s: %v", name, err)
+ }
+ defer pf.Close()
+
+ var imageBase uint64
+ switch h := pf.OptionalHeader.(type) {
+ case *pe.OptionalHeader32:
+ imageBase = uint64(h.ImageBase)
+ case *pe.OptionalHeader64:
+ imageBase = uint64(h.ImageBase)
+ default:
+ return nil, fmt.Errorf("unknown OptionalHeader %T", pf.OptionalHeader)
+ }
+
+ var base uint64
+ if start > 0 {
+ base = start - imageBase
+ }
+ if b.fast || (!b.addr2lineFound && !b.llvmSymbolizerFound) {
+ return &fileNM{file: file{b: b, name: name, base: base}}, nil
+ }
+ return &fileAddr2Line{file: file{b: b, name: name, base: base}}, nil
+}
+
// elfMapping stores the parameters of a runtime mapping that are needed to
// identify the ELF segment associated with a mapping.
type elfMapping struct {
	// Runtime mapping parameters. limit is exclusive: computeBase rejects
	// addresses with addr >= limit.
	start, limit, offset uint64
	// Offset of kernel relocation symbol. Only defined for kernel images, nil otherwise.
	kernelOffset *uint64
}
+
+// findProgramHeader returns the program segment that matches the current
+// mapping and the given address, or an error if it cannot find a unique program
+// header.
+func (m *elfMapping) findProgramHeader(ef *elf.File, addr uint64) (*elf.ProgHeader, error) {
+ // For user space executables, we try to find the actual program segment that
+ // is associated with the given mapping. Skip this search if limit <= start.
+ // We cannot use just a check on the start address of the mapping to tell if
+ // it's a kernel / .ko module mapping, because with quipper address remapping
+ // enabled, the address would be in the lower half of the address space.
+
+ if m.kernelOffset != nil || m.start >= m.limit || m.limit >= (uint64(1)<<63) {
+ // For the kernel, find the program segment that includes the .text section.
+ return elfexec.FindTextProgHeader(ef), nil
+ }
+
+ // Fetch all the loadable segments.
+ var phdrs []elf.ProgHeader
+ for i := range ef.Progs {
+ if ef.Progs[i].Type == elf.PT_LOAD {
+ phdrs = append(phdrs, ef.Progs[i].ProgHeader)
+ }
+ }
+ // Some ELF files don't contain any loadable program segments, e.g. .ko
+ // kernel modules. It's not an error to have no header in such cases.
+ if len(phdrs) == 0 {
+ return nil, nil
+ }
+ // Get all program headers associated with the mapping.
+ headers := elfexec.ProgramHeadersForMapping(phdrs, m.offset, m.limit-m.start)
+ if len(headers) == 0 {
+ return nil, errors.New("no program header matches mapping info")
+ }
+ if len(headers) == 1 {
+ return headers[0], nil
+ }
+
+ // Use the file offset corresponding to the address to symbolize, to narrow
+ // down the header.
+ return elfexec.HeaderForFileOffset(headers, addr-m.start+m.offset)
+}
+
// file implements the binutils.ObjFile interface.
type file struct {
	b       *binrep
	name    string // path the file was opened under
	buildID string // build id recorded at open time; may be empty

	baseOnce sync.Once // Ensures the base, baseErr and isData are computed once.
	base     uint64
	baseErr  error // Any eventual error while computing the base.
	isData   bool  // True when the matched program segment lacks the execute flag.
	// Mapping information. Relevant only for ELF files, nil otherwise.
	m *elfMapping
}
+
+// computeBase computes the relocation base for the given binary file only if
+// the elfMapping field is set. It populates the base and isData fields and
+// returns an error.
+func (f *file) computeBase(addr uint64) error {
+ if f == nil || f.m == nil {
+ return nil
+ }
+ if addr < f.m.start || addr >= f.m.limit {
+ return fmt.Errorf("specified address %x is outside the mapping range [%x, %x] for file %q", addr, f.m.start, f.m.limit, f.name)
+ }
+ ef, err := elfOpen(f.name)
+ if err != nil {
+ return fmt.Errorf("error parsing %s: %v", f.name, err)
+ }
+ defer ef.Close()
+
+ ph, err := f.m.findProgramHeader(ef, addr)
+ if err != nil {
+ return fmt.Errorf("failed to find program header for file %q, ELF mapping %#v, address %x: %v", f.name, *f.m, addr, err)
+ }
+
+ base, err := elfexec.GetBase(&ef.FileHeader, ph, f.m.kernelOffset, f.m.start, f.m.limit, f.m.offset)
+ if err != nil {
+ return err
+ }
+ f.base = base
+ f.isData = ph != nil && ph.Flags&elf.PF_X == 0
+ return nil
+}
+
// Name returns the path under which the object file was opened.
func (f *file) Name() string {
	return f.name
}
+
// ObjAddr translates a runtime address to the corresponding object-file
// address by subtracting the relocation base, which is computed lazily
// (exactly once) from this first address.
func (f *file) ObjAddr(addr uint64) (uint64, error) {
	f.baseOnce.Do(func() { f.baseErr = f.computeBase(addr) })
	if f.baseErr != nil {
		return 0, f.baseErr
	}
	return addr - f.base, nil
}
+
// BuildID returns the build id recorded when the file was opened; it may
// be empty.
func (f *file) BuildID() string {
	return f.buildID
}
+
// SourceLine triggers the one-time base computation for this address but
// performs no symbolization itself: the plain file type returns no frames.
// fileNM and fileAddr2Line provide real implementations.
func (f *file) SourceLine(addr uint64) ([]plugin.Frame, error) {
	f.baseOnce.Do(func() { f.baseErr = f.computeBase(addr) })
	if f.baseErr != nil {
		return nil, f.baseErr
	}
	return nil, nil
}
+
// Close is a no-op: the plain file type keeps no resources open between
// calls. fileAddr2Line replaces it to stop symbolizer subprocesses.
func (f *file) Close() error {
	return nil
}
+
// Symbols returns the symbols of the object file matching regexp r, or the
// symbol containing addr when addr is non-zero. It shells out to the
// configured nm tool; -n sorts the output by address, which findSymbols
// relies on to derive symbol end addresses.
func (f *file) Symbols(r *regexp.Regexp, addr uint64) ([]*plugin.Sym, error) {
	// Get from nm a list of symbols sorted by address.
	cmd := exec.Command(f.b.nm, "-n", f.name)
	out, err := cmd.Output()
	if err != nil {
		return nil, fmt.Errorf("%v: %v", cmd.Args, err)
	}

	return findSymbols(out, f.name, r, addr)
}
+
// fileNM implements the binutils.ObjFile interface, using 'nm' to map
// addresses to symbols (without file/line number information). It is
// faster than fileAddr2Line.
type fileNM struct {
	file
	addr2linernm *addr2LinerNM // created lazily by SourceLine on first use
}
+
+func (f *fileNM) SourceLine(addr uint64) ([]plugin.Frame, error) {
+ f.baseOnce.Do(func() { f.baseErr = f.computeBase(addr) })
+ if f.baseErr != nil {
+ return nil, f.baseErr
+ }
+ if f.addr2linernm == nil {
+ addr2liner, err := newAddr2LinerNM(f.b.nm, f.name, f.base)
+ if err != nil {
+ return nil, err
+ }
+ f.addr2linernm = addr2liner
+ }
+ return f.addr2linernm.addrInfo(addr)
+}
+
// fileAddr2Line implements the binutils.ObjFile interface, using
// llvm-symbolizer, if that's available, or addr2line to map addresses to
// symbols (with file/line number information). It can be slow for large
// binaries with debug information.
type fileAddr2Line struct {
	once sync.Once // guards init of the symbolizer subprocesses
	file
	addr2liner     *addr2Liner     // addr2line fallback; nil when unused
	llvmSymbolizer *llvmSymbolizer // preferred symbolizer; nil when unavailable
	isData         bool
	// NOTE(review): the isData field above shadows the embedded file.isData
	// that computeBase populates; init reads the outer field, and nothing in
	// this view sets it. Verify whether this shadowing is intentional.
}
+
+func (f *fileAddr2Line) SourceLine(addr uint64) ([]plugin.Frame, error) {
+ f.baseOnce.Do(func() { f.baseErr = f.computeBase(addr) })
+ if f.baseErr != nil {
+ return nil, f.baseErr
+ }
+ f.once.Do(f.init)
+ if f.llvmSymbolizer != nil {
+ return f.llvmSymbolizer.addrInfo(addr)
+ }
+ if f.addr2liner != nil {
+ return f.addr2liner.addrInfo(addr)
+ }
+ return nil, fmt.Errorf("could not find local addr2liner")
+}
+
+func (f *fileAddr2Line) init() {
+ if llvmSymbolizer, err := newLLVMSymbolizer(f.b.llvmSymbolizer, f.name, f.base, f.isData); err == nil {
+ f.llvmSymbolizer = llvmSymbolizer
+ return
+ }
+
+ if addr2liner, err := newAddr2Liner(f.b.addr2line, f.name, f.base); err == nil {
+ f.addr2liner = addr2liner
+
+ // When addr2line encounters some gcc compiled binaries, it
+ // drops interesting parts of names in anonymous namespaces.
+ // Fallback to NM for better function names.
+ if nm, err := newAddr2LinerNM(f.b.nm, f.name, f.base); err == nil {
+ f.addr2liner.nm = nm
+ }
+ }
+}
+
+func (f *fileAddr2Line) Close() error {
+ if f.llvmSymbolizer != nil {
+ f.llvmSymbolizer.rw.close()
+ f.llvmSymbolizer = nil
+ }
+ if f.addr2liner != nil {
+ f.addr2liner.rw.close()
+ f.addr2liner = nil
+ }
+ return nil
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/binutils/disasm.go b/src/cmd/vendor/github.com/google/pprof/internal/binutils/disasm.go
new file mode 100644
index 0000000..2709ef8
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/binutils/disasm.go
@@ -0,0 +1,180 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package binutils
+
+import (
+ "bytes"
+ "io"
+ "regexp"
+ "strconv"
+ "strings"
+
+ "github.com/google/pprof/internal/plugin"
+ "github.com/ianlancetaylor/demangle"
+)
+
var (
	// nmOutputRE matches one nm line: address, one-character type, name.
	nmOutputRE = regexp.MustCompile(`^\s*([[:xdigit:]]+)\s+(.)\s+(.*)`)
	// objdumpAsmOutputRE matches a disassembled instruction: address, text.
	objdumpAsmOutputRE = regexp.MustCompile(`^\s*([[:xdigit:]]+):\s+(.*)`)
	// objdumpOutputFileLine matches "file:line" source markers (optionally
	// prefixed by ';' in llvm-objdump output).
	objdumpOutputFileLine = regexp.MustCompile(`^;?\s?(.*):([0-9]+)`)
	// objdumpOutputFunction matches GNU objdump function headers: "name():".
	objdumpOutputFunction = regexp.MustCompile(`^;?\s?(\S.*)\(\):`)
	// objdumpOutputFunctionLLVM matches llvm-objdump function headers:
	// an optional address followed by "name:".
	objdumpOutputFunctionLLVM = regexp.MustCompile(`^([[:xdigit:]]+)?\s?(.*):`)
)
+
// findSymbols parses sorted nm output and returns the symbols selected by
// regexp r or containing address (when non-zero). Names that share an
// address are grouped into a single symbol whose extent ends just before
// the next listed address.
func findSymbols(syms []byte, file string, r *regexp.Regexp, address uint64) ([]*plugin.Sym, error) {
	// Collect all symbols from the nm output, grouping names mapped to
	// the same address into a single symbol.

	// The symbols to return.
	var symbols []*plugin.Sym

	// The current group of symbol names, and the address they are all at.
	names, start := []string{}, uint64(0)

	buf := bytes.NewBuffer(syms)

	for {
		symAddr, name, err := nextSymbol(buf)
		if err == io.EOF {
			// Done. If there was an unfinished group, append it.
			// NOTE(review): at EOF symAddr is 0, so the final group's end
			// (symAddr-1) wraps to the maximum uint64 — the last symbol is
			// treated as extending to the end of the address space.
			if len(names) != 0 {
				if match := matchSymbol(names, start, symAddr-1, r, address); match != nil {
					symbols = append(symbols, &plugin.Sym{Name: match, File: file, Start: start, End: symAddr - 1})
				}
			}

			// And return the symbols.
			return symbols, nil
		}

		if err != nil {
			// There was some kind of serious error reading nm's output.
			return nil, err
		}

		// If this symbol is at the same address as the current group, add it to the group.
		if symAddr == start {
			names = append(names, name)
			continue
		}

		// Otherwise append the current group to the list of symbols.
		if match := matchSymbol(names, start, symAddr-1, r, address); match != nil {
			symbols = append(symbols, &plugin.Sym{Name: match, File: file, Start: start, End: symAddr - 1})
		}

		// And start a new group.
		names, start = []string{name}, symAddr
	}
}
+
+// matchSymbol checks if a symbol is to be selected by checking its
+// name to the regexp and optionally its address. It returns the name(s)
+// to be used for the matched symbol, or nil if no match
+func matchSymbol(names []string, start, end uint64, r *regexp.Regexp, address uint64) []string {
+ if address != 0 && address >= start && address <= end {
+ return names
+ }
+ for _, name := range names {
+ if r == nil || r.MatchString(name) {
+ return []string{name}
+ }
+
+ // Match all possible demangled versions of the name.
+ for _, o := range [][]demangle.Option{
+ {demangle.NoClones},
+ {demangle.NoParams, demangle.NoEnclosingParams},
+ {demangle.NoParams, demangle.NoEnclosingParams, demangle.NoTemplateParams},
+ } {
+ if demangled, err := demangle.ToString(name, o...); err == nil && r.MatchString(demangled) {
+ return []string{demangled}
+ }
+ }
+ }
+ return nil
+}
+
+// disassemble parses the output of the objdump command and returns
+// the assembly instructions in a slice.
+func disassemble(asm []byte) ([]plugin.Inst, error) {
+ buf := bytes.NewBuffer(asm)
+ function, file, line := "", "", 0
+ var assembly []plugin.Inst
+ for {
+ input, err := buf.ReadString('\n')
+ if err != nil {
+ if err != io.EOF {
+ return nil, err
+ }
+ if input == "" {
+ break
+ }
+ }
+ input = strings.TrimSpace(input)
+
+ if fields := objdumpAsmOutputRE.FindStringSubmatch(input); len(fields) == 3 {
+ if address, err := strconv.ParseUint(fields[1], 16, 64); err == nil {
+ assembly = append(assembly,
+ plugin.Inst{
+ Addr: address,
+ Text: fields[2],
+ Function: function,
+ File: file,
+ Line: line,
+ })
+ continue
+ }
+ }
+ if fields := objdumpOutputFileLine.FindStringSubmatch(input); len(fields) == 3 {
+ if l, err := strconv.ParseUint(fields[2], 10, 32); err == nil {
+ file, line = fields[1], int(l)
+ }
+ continue
+ }
+ if fields := objdumpOutputFunction.FindStringSubmatch(input); len(fields) == 2 {
+ function = fields[1]
+ continue
+ } else {
+ if fields := objdumpOutputFunctionLLVM.FindStringSubmatch(input); len(fields) == 3 {
+ function = fields[2]
+ continue
+ }
+ }
+ // Reset on unrecognized lines.
+ function, file, line = "", "", 0
+ }
+
+ return assembly, nil
+}
+
+// nextSymbol parses the nm output to find the next symbol listed.
+// Skips over any output it cannot recognize.
+func nextSymbol(buf *bytes.Buffer) (uint64, string, error) {
+ for {
+ line, err := buf.ReadString('\n')
+ if err != nil {
+ if err != io.EOF || line == "" {
+ return 0, "", err
+ }
+ }
+ line = strings.TrimSpace(line)
+
+ if fields := nmOutputRE.FindStringSubmatch(line); len(fields) == 4 {
+ if address, err := strconv.ParseUint(fields[1], 16, 64); err == nil {
+ return address, fields[3], nil
+ }
+ }
+ }
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/driver/cli.go b/src/cmd/vendor/github.com/google/pprof/internal/driver/cli.go
new file mode 100644
index 0000000..b97ef85
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/driver/cli.go
@@ -0,0 +1,360 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package driver
+
+import (
+ "errors"
+ "fmt"
+ "os"
+
+ "github.com/google/pprof/internal/binutils"
+ "github.com/google/pprof/internal/plugin"
+)
+
// source collects the profile sources and the fetch/symbolization options
// gathered from the command line by parseFlags.
type source struct {
	Sources   []string // profile inputs (paths or URLs)
	ExecName  string   // executable recognized as the first argument, if any
	BuildID   string   // build id override for the first mapping
	Base      []string // base profiles for subtraction or diffing
	DiffBase  bool     // whether Base holds -diff_base (rather than -base) profiles
	Normalize bool     // whether to normalize against the base profiles

	Seconds            int    // duration for dynamic profile collection
	Timeout            int    // timeout in seconds for fetching a profile
	Symbolize          string // symbolization options (-symbolize flag)
	HTTPHostport       string // host:port for the interactive web UI
	HTTPDisableBrowser bool   // skip opening a browser for the web UI
	Comment            string // free-form annotation recorded in the profile
}
+
// parseFlags parses the command lines through the specified flags package
// and returns the source of the profile and optionally the command
// for the kind of report to generate (nil for interactive use).
func parseFlags(o *plugin.Options) (*source, []string, error) {
	flag := o.Flagset
	// Comparisons.
	flagDiffBase := flag.StringList("diff_base", "", "Source of base profile for comparison")
	flagBase := flag.StringList("base", "", "Source of base profile for profile subtraction")
	// Source options.
	flagSymbolize := flag.String("symbolize", "", "Options for profile symbolization")
	flagBuildID := flag.String("buildid", "", "Override build id for first mapping")
	flagTimeout := flag.Int("timeout", -1, "Timeout in seconds for fetching a profile")
	flagAddComment := flag.String("add_comment", "", "Annotation string to record in the profile")
	// CPU profile options
	flagSeconds := flag.Int("seconds", -1, "Length of time for dynamic profiles")
	// Heap profile options
	flagInUseSpace := flag.Bool("inuse_space", false, "Display in-use memory size")
	flagInUseObjects := flag.Bool("inuse_objects", false, "Display in-use object counts")
	flagAllocSpace := flag.Bool("alloc_space", false, "Display allocated memory size")
	flagAllocObjects := flag.Bool("alloc_objects", false, "Display allocated object counts")
	// Contention profile options
	flagTotalDelay := flag.Bool("total_delay", false, "Display total delay at each region")
	flagContentions := flag.Bool("contentions", false, "Display number of delays at each region")
	flagMeanDelay := flag.Bool("mean_delay", false, "Display mean delay at each region")
	flagTools := flag.String("tools", os.Getenv("PPROF_TOOLS"), "Path for object tool pathnames")

	flagHTTP := flag.String("http", "", "Present interactive web UI at the specified http host:port")
	flagNoBrowser := flag.Bool("no_browser", false, "Skip opening a browser for the interactive web UI")

	// Flags that set configuration properties.
	cfg := currentConfig()
	configFlagSetter := installConfigFlags(flag, &cfg)

	// Register one flag per pprof report command; commands that take a
	// regexp parameter get a string flag, the rest get a boolean flag.
	flagCommands := make(map[string]*bool)
	flagParamCommands := make(map[string]*string)
	for name, cmd := range pprofCommands {
		if cmd.hasParam {
			flagParamCommands[name] = flag.String(name, "", "Generate a report in "+name+" format, matching regexp")
		} else {
			flagCommands[name] = flag.Bool(name, false, "Generate a report in "+name+" format")
		}
	}

	args := flag.Parse(func() {
		o.UI.Print(usageMsgHdr +
			usage(true) +
			usageMsgSrc +
			flag.ExtraUsage() +
			usageMsgVars)
	})
	if len(args) == 0 {
		return nil, nil, errors.New("no profile source specified")
	}

	var execName string
	// Recognize first argument as an executable or buildid override.
	if len(args) > 1 {
		arg0 := args[0]
		if file, err := o.Obj.Open(arg0, 0, ^uint64(0), 0, ""); err == nil {
			file.Close()
			execName = arg0
			args = args[1:]
		}
	}

	// Apply any specified flags to cfg.
	if err := configFlagSetter(); err != nil {
		return nil, nil, err
	}

	cmd, err := outputFormat(flagCommands, flagParamCommands)
	if err != nil {
		return nil, nil, err
	}
	if cmd != nil && *flagHTTP != "" {
		return nil, nil, errors.New("-http is not compatible with an output format on the command line")
	}

	if *flagNoBrowser && *flagHTTP == "" {
		return nil, nil, errors.New("-no_browser only makes sense with -http")
	}

	// Map the legacy sample-selection flags onto cfg.SampleIndex; when more
	// than one is set, sampleIndex keeps the first and warns on the rest.
	si := cfg.SampleIndex
	si = sampleIndex(flagTotalDelay, si, "delay", "-total_delay", o.UI)
	si = sampleIndex(flagMeanDelay, si, "delay", "-mean_delay", o.UI)
	si = sampleIndex(flagContentions, si, "contentions", "-contentions", o.UI)
	si = sampleIndex(flagInUseSpace, si, "inuse_space", "-inuse_space", o.UI)
	si = sampleIndex(flagInUseObjects, si, "inuse_objects", "-inuse_objects", o.UI)
	si = sampleIndex(flagAllocSpace, si, "alloc_space", "-alloc_space", o.UI)
	si = sampleIndex(flagAllocObjects, si, "alloc_objects", "-alloc_objects", o.UI)
	cfg.SampleIndex = si

	if *flagMeanDelay {
		cfg.Mean = true
	}

	source := &source{
		Sources:            args,
		ExecName:           execName,
		BuildID:            *flagBuildID,
		Seconds:            *flagSeconds,
		Timeout:            *flagTimeout,
		Symbolize:          *flagSymbolize,
		HTTPHostport:       *flagHTTP,
		HTTPDisableBrowser: *flagNoBrowser,
		Comment:            *flagAddComment,
	}

	if err := source.addBaseProfiles(*flagBase, *flagDiffBase); err != nil {
		return nil, nil, err
	}

	// Normalization requires at least one base profile to normalize against.
	normalize := cfg.Normalize
	if normalize && len(source.Base) == 0 {
		return nil, nil, errors.New("must have base profile to normalize by")
	}
	source.Normalize = normalize

	if bu, ok := o.Obj.(*binutils.Binutils); ok {
		bu.SetTools(*flagTools)
	}

	setCurrentConfig(cfg)
	return source, cmd, nil
}
+
+// addBaseProfiles adds the list of base profiles or diff base profiles to
+// the source. This function will return an error if both base and diff base
+// profiles are specified.
+func (source *source) addBaseProfiles(flagBase, flagDiffBase []*string) error {
+ base, diffBase := dropEmpty(flagBase), dropEmpty(flagDiffBase)
+ if len(base) > 0 && len(diffBase) > 0 {
+ return errors.New("-base and -diff_base flags cannot both be specified")
+ }
+
+ source.Base = base
+ if len(diffBase) > 0 {
+ source.Base, source.DiffBase = diffBase, true
+ }
+ return nil
+}
+
// dropEmpty takes a slice of string flag pointers and returns the
// dereferenced values, skipping any that are empty.
func dropEmpty(list []*string) []string {
	var out []string
	for _, p := range list {
		if v := *p; v != "" {
			out = append(out, v)
		}
	}
	return out
}
+
// installConfigFlags creates command line flags for configuration
// fields and returns a function which can be called after flags have
// been parsed to copy any flags specified on the command line to
// *cfg.
func installConfigFlags(flag plugin.FlagSet, cfg *config) func() error {
	// List of functions for setting the different parts of a config.
	var setters []func()
	// err is shared between the setter closures below and the returned
	// function: a choice-group setter records a conflict here, and the
	// returned function surfaces it right after that setter runs.
	var err error // Holds any errors encountered while running setters.

	for _, field := range configFields {
		n := field.name
		help := configHelp[n]
		var setter func()
		// Register a flag of the matching type and capture a closure that
		// copies the parsed flag value into the config field.
		switch ptr := cfg.fieldPtr(field).(type) {
		case *bool:
			f := flag.Bool(n, *ptr, help)
			setter = func() { *ptr = *f }
		case *int:
			f := flag.Int(n, *ptr, help)
			setter = func() { *ptr = *f }
		case *float64:
			f := flag.Float64(n, *ptr, help)
			setter = func() { *ptr = *f }
		case *string:
			if len(field.choices) == 0 {
				f := flag.String(n, *ptr, help)
				setter = func() { *ptr = *f }
			} else {
				// Make a separate flag per possible choice.
				// Set all flags to initially false so we can
				// identify conflicts.
				bools := make(map[string]*bool)
				for _, choice := range field.choices {
					bools[choice] = flag.Bool(choice, false, configHelp[choice])
				}
				setter = func() {
					var set []string
					for k, v := range bools {
						if *v {
							set = append(set, k)
						}
					}
					switch len(set) {
					case 0:
						// Leave as default value.
					case 1:
						*ptr = set[0]
					default:
						err = fmt.Errorf("conflicting options set: %v", set)
					}
				}
			}
		}
		setters = append(setters, setter)
	}

	return func() error {
		// Apply the setter for every flag.
		for _, setter := range setters {
			setter()
			if err != nil {
				return err
			}
		}
		return nil
	}
}
+
+func sampleIndex(flag *bool, si string, sampleType, option string, ui plugin.UI) string {
+ if *flag {
+ if si == "" {
+ return sampleType
+ }
+ ui.PrintErr("Multiple value selections, ignoring ", option)
+ }
+ return si
+}
+
// outputFormat returns the report format selected on the command line as a
// command name, optionally followed by its regexp argument. Selecting more
// than one format is an error.
func outputFormat(bcmd map[string]*bool, acmd map[string]*string) ([]string, error) {
	var cmd []string
	record := func(c []string) error {
		if cmd != nil {
			return errors.New("must set at most one output format")
		}
		cmd = c
		return nil
	}
	for name, chosen := range bcmd {
		if *chosen {
			if err := record([]string{name}); err != nil {
				return nil, err
			}
		}
	}
	for name, param := range acmd {
		if *param != "" {
			if err := record([]string{name, *param}); err != nil {
				return nil, err
			}
		}
	}
	return cmd, nil
}
+
// usageMsgHdr is the opening section of the command-line help text,
// describing the three invocation modes (report, interactive, web UI).
var usageMsgHdr = `usage:

Produce output in the specified format.

   pprof <format> [options] [binary] <source> ...

Omit the format to get an interactive shell whose commands can be used
to generate various views of a profile

   pprof [options] [binary] <source> ...

Omit the format and provide the "-http" flag to get an interactive web
interface at the specified host:port that can be used to navigate through
various views of a profile.

   pprof -http [host]:[port] [options] [binary] <source> ...

Details:
`
+
+var usageMsgSrc = "\n\n" +
+ " Source options:\n" +
+ " -seconds Duration for time-based profile collection\n" +
+ " -timeout Timeout in seconds for profile collection\n" +
+ " -buildid Override build id for main binary\n" +
+ " -add_comment Free-form annotation to add to the profile\n" +
+ " Displayed on some reports or with pprof -comments\n" +
+ " -diff_base source Source of base profile for comparison\n" +
+ " -base source Source of base profile for profile subtraction\n" +
+ " profile.pb.gz Profile in compressed protobuf format\n" +
+ " legacy_profile Profile in legacy pprof format\n" +
+ " http://host/profile URL for profile handler to retrieve\n" +
+ " -symbolize= Controls source of symbol information\n" +
+ " none Do not attempt symbolization\n" +
+ " local Examine only local binaries\n" +
+ " fastlocal Only get function names from local binaries\n" +
+ " remote Do not examine local binaries\n" +
+ " force Force re-symbolization\n" +
+ " Binary Local path or build id of binary for symbolization\n"
+
+var usageMsgVars = "\n\n" +
+ " Misc options:\n" +
+ " -http Provide web interface at host:port.\n" +
+ " Host is optional and 'localhost' by default.\n" +
+ " Port is optional and a randomly available port by default.\n" +
+ " -no_browser Skip opening a browser for the interactive web UI.\n" +
+ " -tools Search path for object tools\n" +
+ "\n" +
+ " Legacy convenience options:\n" +
+ " -inuse_space Same as -sample_index=inuse_space\n" +
+ " -inuse_objects Same as -sample_index=inuse_objects\n" +
+ " -alloc_space Same as -sample_index=alloc_space\n" +
+ " -alloc_objects Same as -sample_index=alloc_objects\n" +
+ " -total_delay Same as -sample_index=delay\n" +
+ " -contentions Same as -sample_index=contentions\n" +
+ " -mean_delay Same as -mean -sample_index=delay\n" +
+ "\n" +
+ " Environment Variables:\n" +
+ " PPROF_TMPDIR Location for saved profiles (default $HOME/pprof)\n" +
+ " PPROF_TOOLS Search path for object-level tools\n" +
+ " PPROF_BINARY_PATH Search path for local binary files\n" +
+ " default: $HOME/pprof/binaries\n" +
+ " searches $buildid/$name, $buildid/*, $path/$buildid,\n" +
+ " ${buildid:0:2}/${buildid:2}.debug, $name, $path,\n" +
+ " ${name}.debug, $dir/.debug/${name}.debug,\n" +
+ " usr/lib/debug/$dir/${name}.debug\n" +
+ " * On Windows, %USERPROFILE% is used instead of $HOME"
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/driver/commands.go b/src/cmd/vendor/github.com/google/pprof/internal/driver/commands.go
new file mode 100644
index 0000000..c9edf10
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/driver/commands.go
@@ -0,0 +1,459 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package driver
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "os"
+ "os/exec"
+ "runtime"
+ "sort"
+ "strings"
+ "time"
+
+ "github.com/google/pprof/internal/plugin"
+ "github.com/google/pprof/internal/report"
+)
+
+// commands describes the commands accepted by pprof, keyed by the
+// command name as typed by the user.
+type commands map[string]*command
+
+// command describes the actions for a pprof command. Includes a
+// function for command-line completion, the report format to use
+// during report generation, any postprocessing functions, and whether
+// the command expects a regexp parameter (typically a function name).
+type command struct {
+ format int // report format to generate (a report.* constant)
+ postProcess PostProcessor // postprocessing to run on report
+ visualizer PostProcessor // display output using some callback
+ hasParam bool // collect a parameter from the CLI
+ description string // single-line description text saying what the command does
+ usage string // multi-line help text saying how the command is used
+}
+
+// help returns a help string for a command: its one-line description
+// followed, when available, by an indented "Usage:" section.
+// NOTE(review): the name parameter is currently unused.
+func (c *command) help(name string) string {
+ message := c.description + "\n"
+ if c.usage != "" {
+ message += " Usage:\n"
+ lines := strings.Split(c.usage, "\n")
+ for _, line := range lines {
+ // Indent every usage line under the "Usage:" header.
+ message += fmt.Sprintf(" %s\n", line)
+ }
+ }
+ return message + "\n"
+}
+
+// AddCommand adds an additional command to the set of commands
+// accepted by pprof. This enables extensions to add new commands for
+// specialized visualization formats. If the command specified already
+// exists, it is overwritten. Commands added this way have no
+// visualizer and take no regexp parameter.
+func AddCommand(cmd string, format int, post PostProcessor, desc, usage string) {
+ pprofCommands[cmd] = &command{format, post, nil, false, desc, usage}
+}
+
+// SetVariableDefault sets the default value for a pprof
+// variable. This enables extensions to set their own defaults.
+// NOTE(review): any error returned by configure (e.g. an unknown
+// variable name) is silently discarded here.
+func SetVariableDefault(variable, value string) {
+ configure(variable, value)
+}
+
+// PostProcessor is a function that applies post-processing to the report output,
+// reading the raw report from input and writing the transformed result to output.
+type PostProcessor func(input io.Reader, output io.Writer, ui plugin.UI) error
+
+// interactiveMode is true if pprof is running on interactive mode, reading
+// commands from its shell. It changes how some post-processors behave
+// (e.g. whether visualizers are waited on).
+var interactiveMode = false
+
+// pprofCommands are the report generation commands recognized by pprof.
+// Each entry pairs a report format with optional post-processing and
+// visualization steps; see the command struct for field meanings.
+var pprofCommands = commands{
+ // Commands that require no post-processing.
+ "comments": {report.Comments, nil, nil, false, "Output all profile comments", ""},
+ "disasm": {report.Dis, nil, nil, true, "Output assembly listings annotated with samples", listHelp("disasm", true)},
+ "dot": {report.Dot, nil, nil, false, "Outputs a graph in DOT format", reportHelp("dot", false, true)},
+ "list": {report.List, nil, nil, true, "Output annotated source for functions matching regexp", listHelp("list", false)},
+ "peek": {report.Tree, nil, nil, true, "Output callers/callees of functions matching regexp", "peek func_regex\nDisplay callers and callees of functions matching func_regex."},
+ "raw": {report.Raw, nil, nil, false, "Outputs a text representation of the raw profile", ""},
+ "tags": {report.Tags, nil, nil, false, "Outputs all tags in the profile", "tags [tag_regex]* [-ignore_regex]* [>file]\nList tags with key:value matching tag_regex and exclude ignore_regex."},
+ "text": {report.Text, nil, nil, false, "Outputs top entries in text form", reportHelp("text", true, true)},
+ "top": {report.Text, nil, nil, false, "Outputs top entries in text form", reportHelp("top", true, true)},
+ "traces": {report.Traces, nil, nil, false, "Outputs all profile samples in text form", ""},
+ "tree": {report.Tree, nil, nil, false, "Outputs a text rendering of call graph", reportHelp("tree", true, true)},
+
+ // Save binary formats to a file
+ "callgrind": {report.Callgrind, nil, awayFromTTY("callgraph.out"), false, "Outputs a graph in callgrind format", reportHelp("callgrind", false, true)},
+ "proto": {report.Proto, nil, awayFromTTY("pb.gz"), false, "Outputs the profile in compressed protobuf format", ""},
+ "topproto": {report.TopProto, nil, awayFromTTY("pb.gz"), false, "Outputs top entries in compressed protobuf format", ""},
+
+ // Generate report in DOT format and postprocess with dot
+ "gif": {report.Dot, invokeDot("gif"), awayFromTTY("gif"), false, "Outputs a graph image in GIF format", reportHelp("gif", false, true)},
+ "pdf": {report.Dot, invokeDot("pdf"), awayFromTTY("pdf"), false, "Outputs a graph in PDF format", reportHelp("pdf", false, true)},
+ "png": {report.Dot, invokeDot("png"), awayFromTTY("png"), false, "Outputs a graph image in PNG format", reportHelp("png", false, true)},
+ "ps": {report.Dot, invokeDot("ps"), awayFromTTY("ps"), false, "Outputs a graph in PS format", reportHelp("ps", false, true)},
+
+ // Save SVG output into a file
+ "svg": {report.Dot, massageDotSVG(), awayFromTTY("svg"), false, "Outputs a graph in SVG format", reportHelp("svg", false, true)},
+
+ // Visualize postprocessed dot output
+ "eog": {report.Dot, invokeDot("svg"), invokeVisualizer("svg", []string{"eog"}), false, "Visualize graph through eog", reportHelp("eog", false, false)},
+ "evince": {report.Dot, invokeDot("pdf"), invokeVisualizer("pdf", []string{"evince"}), false, "Visualize graph through evince", reportHelp("evince", false, false)},
+ "gv": {report.Dot, invokeDot("ps"), invokeVisualizer("ps", []string{"gv --noantialias"}), false, "Visualize graph through gv", reportHelp("gv", false, false)},
+ "web": {report.Dot, massageDotSVG(), invokeVisualizer("svg", browsers()), false, "Visualize graph through web browser", reportHelp("web", false, false)},
+
+ // Visualize callgrind output
+ "kcachegrind": {report.Callgrind, nil, invokeVisualizer("grind", kcachegrind), false, "Visualize report in KCachegrind", reportHelp("kcachegrind", false, false)},
+
+ // Visualize HTML directly generated by report.
+ "weblist": {report.WebList, nil, invokeVisualizer("html", browsers()), true, "Display annotated source in a web browser", listHelp("weblist", false)},
+}
+
+// configHelp contains help text per configuration parameter. Keys match
+// the config field names in configFields; values may be multi-line
+// (first line is the short description shown in usage listings).
+var configHelp = map[string]string{
+ // Filename for file-based output formats, stdout by default.
+ "output": helpText("Output filename for file-based outputs"),
+
+ // Comparisons.
+ "drop_negative": helpText(
+ "Ignore negative differences",
+ "Do not show any locations with values <0."),
+
+ // Graph handling options.
+ "call_tree": helpText(
+ "Create a context-sensitive call tree",
+ "Treat locations reached through different paths as separate."),
+
+ // Display options.
+ "relative_percentages": helpText(
+ "Show percentages relative to focused subgraph",
+ "If unset, percentages are relative to full graph before focusing",
+ "to facilitate comparison with original graph."),
+ "unit": helpText(
+ "Measurement units to display",
+ "Scale the sample values to this unit.",
+ "For time-based profiles, use seconds, milliseconds, nanoseconds, etc.",
+ "For memory profiles, use megabytes, kilobytes, bytes, etc.",
+ "Using auto will scale each value independently to the most natural unit."),
+ "compact_labels": "Show minimal headers",
+ "source_path": "Search path for source files",
+ "trim_path": "Path to trim from source paths before search",
+ "intel_syntax": helpText(
+ "Show assembly in Intel syntax",
+ "Only applicable to commands `disasm` and `weblist`"),
+
+ // Filtering options
+ "nodecount": helpText(
+ "Max number of nodes to show",
+ "Uses heuristics to limit the number of locations to be displayed.",
+ "On graphs, dotted edges represent paths through nodes that have been removed."),
+ "nodefraction": "Hide nodes below <f>*total",
+ "edgefraction": "Hide edges below <f>*total",
+ "trim": helpText(
+ "Honor nodefraction/edgefraction/nodecount defaults",
+ "Set to false to get the full profile, without any trimming."),
+ "focus": helpText(
+ "Restricts to samples going through a node matching regexp",
+ "Discard samples that do not include a node matching this regexp.",
+ "Matching includes the function name, filename or object name."),
+ "ignore": helpText(
+ "Skips paths going through any nodes matching regexp",
+ "If set, discard samples that include a node matching this regexp.",
+ "Matching includes the function name, filename or object name."),
+ "prune_from": helpText(
+ "Drops any functions below the matched frame.",
+ "If set, any frames matching the specified regexp and any frames",
+ "below it will be dropped from each sample."),
+ "hide": helpText(
+ "Skips nodes matching regexp",
+ "Discard nodes that match this location.",
+ "Other nodes from samples that include this location will be shown.",
+ "Matching includes the function name, filename or object name."),
+ "show": helpText(
+ "Only show nodes matching regexp",
+ "If set, only show nodes that match this location.",
+ "Matching includes the function name, filename or object name."),
+ "show_from": helpText(
+ "Drops functions above the highest matched frame.",
+ "If set, all frames above the highest match are dropped from every sample.",
+ "Matching includes the function name, filename or object name."),
+ "tagroot": helpText(
+ "Adds pseudo stack frames for labels key/value pairs at the callstack root.",
+ "A comma-separated list of label keys.",
+ "The first key creates frames at the new root."),
+ "tagleaf": helpText(
+ "Adds pseudo stack frames for labels key/value pairs at the callstack leaf.",
+ "A comma-separated list of label keys.",
+ "The last key creates frames at the new leaf."),
+ "tagfocus": helpText(
+ "Restricts to samples with tags in range or matched by regexp",
+ "Use name=value syntax to limit the matching to a specific tag.",
+ "Numeric tag filter examples: 1kb, 1kb:10kb, memory=32mb:",
+ "String tag filter examples: foo, foo.*bar, mytag=foo.*bar"),
+ "tagignore": helpText(
+ "Discard samples with tags in range or matched by regexp",
+ "Use name=value syntax to limit the matching to a specific tag.",
+ "Numeric tag filter examples: 1kb, 1kb:10kb, memory=32mb:",
+ "String tag filter examples: foo, foo.*bar, mytag=foo.*bar"),
+ "tagshow": helpText(
+ "Only consider tags matching this regexp",
+ "Discard tags that do not match this regexp"),
+ "taghide": helpText(
+ "Skip tags matching this regexp",
+ "Discard tags that match this regexp"),
+ // Sample value selection and scaling options (divide_by/mean/sample_index/normalize)
+ "divide_by": helpText(
+ "Ratio to divide all samples before visualization",
+ "Divide all samples values by a constant, eg the number of processors or jobs."),
+ "mean": helpText(
+ "Average sample value over first value (count)",
+ "For memory profiles, report average memory per allocation.",
+ "For time-based profiles, report average time per event."),
+ "sample_index": helpText(
+ "Sample value to report (0-based index or name)",
+ "Profiles contain multiple values per sample.",
+ "Use sample_index=i to select the ith value (starting at 0)."),
+ "normalize": helpText(
+ "Scales profile based on the base profile."),
+
+ // Data sorting criteria
+ "flat": helpText("Sort entries based on own weight"),
+ "cum": helpText("Sort entries based on cumulative weight"),
+
+ // Output granularity
+ "functions": helpText(
+ "Aggregate at the function level.",
+ "Ignores the filename where the function was defined."),
+ "filefunctions": helpText(
+ "Aggregate at the function level.",
+ "Takes into account the filename where the function was defined."),
+ "files": "Aggregate at the file level.",
+ "lines": "Aggregate at the source code line level.",
+ "addresses": helpText(
+ "Aggregate at the address level.",
+ "Includes functions' addresses in the output."),
+ "noinlines": helpText(
+ "Ignore inlines.",
+ "Attributes inlined functions to their first out-of-line caller."),
+}
+
+// helpText joins the given lines with newlines and appends a trailing
+// newline; the first line is the short description used in listings.
+func helpText(s ...string) string {
+ return strings.Join(s, "\n") + "\n"
+}
+
+// usage returns a string describing the pprof commands and configuration
+// options. If commandLine is set, the output reflects command-line usage
+// (commands and options get a "-" prefix and the interactive-only entries
+// are omitted).
+func usage(commandLine bool) string {
+ var prefix string
+ if commandLine {
+ prefix = "-"
+ }
+ // fmtHelp renders one name/description pair, keeping only the first
+ // line of a multi-line description.
+ fmtHelp := func(c, d string) string {
+ return fmt.Sprintf(" %-16s %s", c, strings.SplitN(d, "\n", 2)[0])
+ }
+
+ var commands []string
+ for name, cmd := range pprofCommands {
+ commands = append(commands, fmtHelp(prefix+name, cmd.description))
+ }
+ sort.Strings(commands)
+
+ var help string
+ if commandLine {
+ help = " Output formats (select at most one):\n"
+ } else {
+ help = " Commands:\n"
+ commands = append(commands, fmtHelp("o/options", "List options and their current values"))
+ commands = append(commands, fmtHelp("q/quit/exit/^D", "Exit pprof"))
+ }
+
+ help = help + strings.Join(commands, "\n") + "\n\n" +
+ " Options:\n"
+
+ // Print help for configuration options after sorting them.
+ // Collect choices for multi-choice options and print them together.
+ var variables []string
+ var radioStrings []string
+ for _, f := range configFields {
+ if len(f.choices) == 0 {
+ variables = append(variables, fmtHelp(prefix+f.name, configHelp[f.name]))
+ continue
+ }
+ // Format help for this group.
+ s := []string{fmtHelp(f.name, "")}
+ for _, choice := range f.choices {
+ s = append(s, " "+fmtHelp(prefix+choice, configHelp[choice]))
+ }
+ radioStrings = append(radioStrings, strings.Join(s, "\n"))
+ }
+ sort.Strings(variables)
+ sort.Strings(radioStrings)
+ return help + strings.Join(variables, "\n") + "\n\n" +
+ " Option groups (only set one per group):\n" +
+ strings.Join(radioStrings, "\n")
+}
+
+// reportHelp builds the usage text for a report-generation command c.
+// cum adds the "-cum" sorting flag to the synopsis; redirect adds the
+// ">f" output-redirection note.
+func reportHelp(c string, cum, redirect bool) string {
+ h := []string{
+ c + " [n] [focus_regex]* [-ignore_regex]*",
+ "Include up to n samples",
+ "Include samples matching focus_regex, and exclude ignore_regex.",
+ }
+ if cum {
+ h[0] += " [-cum]"
+ h = append(h, "-cum sorts the output by cumulative weight")
+ }
+ if redirect {
+ h[0] += " >f"
+ h = append(h, "Optionally save the report on the file f")
+ }
+ return strings.Join(h, "\n")
+}
+
+// listHelp builds the usage text for a source/assembly listing command c.
+// redirect adds the ">f" output-redirection note.
+// NOTE(review): the synopsis concatenates c directly with "<func_regex..."
+// with no separating space — confirm against upstream before changing.
+func listHelp(c string, redirect bool) string {
+ h := []string{
+ c + "<func_regex|address> [-focus_regex]* [-ignore_regex]*",
+ "Include functions matching func_regex, or including the address specified.",
+ "Include samples matching focus_regex, and exclude ignore_regex.",
+ }
+ if redirect {
+ h[0] += " >f"
+ h = append(h, "Optionally save the report on the file f")
+ }
+ return strings.Join(h, "\n")
+}
+
+// browsers returns a list of commands to attempt for web visualization.
+// $BROWSER, if set, takes priority; OS-specific openers follow. Each
+// entry is later split on spaces by invokeVisualizer, so entries like
+// "cmd /c start" carry their arguments inline.
+func browsers() []string {
+ var cmds []string
+ if userBrowser := os.Getenv("BROWSER"); userBrowser != "" {
+ cmds = append(cmds, userBrowser)
+ }
+ switch runtime.GOOS {
+ case "darwin":
+ cmds = append(cmds, "/usr/bin/open")
+ case "windows":
+ cmds = append(cmds, "cmd /c start")
+ default:
+ // Commands opening browsers are prioritized over xdg-open, so browser()
+ // command can be used on linux to open the .svg file generated by the -web
+ // command (the .svg file includes embedded javascript so is best viewed in
+ // a browser).
+ cmds = append(cmds, []string{"chrome", "google-chrome", "chromium", "firefox", "sensible-browser"}...)
+ if os.Getenv("DISPLAY") != "" {
+ // xdg-open is only for use in a desktop environment.
+ cmds = append(cmds, "xdg-open")
+ }
+ }
+ return cmds
+}
+
+// kcachegrind is the visualizer command list for the kcachegrind command.
+var kcachegrind = []string{"kcachegrind"}
+
+// awayFromTTY saves the output in a file if it would otherwise go to
+// the terminal screen. This is used to avoid dumping binary data on
+// the screen. format is used as the temporary file's extension.
+func awayFromTTY(format string) PostProcessor {
+ return func(input io.Reader, output io.Writer, ui plugin.UI) error {
+ if output == os.Stdout && (ui.IsTerminal() || interactiveMode) {
+ tempFile, err := newTempFile("", "profile", "."+format)
+ if err != nil {
+ return err
+ }
+ ui.PrintErr("Generating report in ", tempFile.Name())
+ output = tempFile
+ }
+ _, err := io.Copy(output, input)
+ return err
+ }
+}
+
+// invokeDot returns a PostProcessor that pipes the DOT report through
+// the Graphviz "dot" tool to produce the given output format.
+func invokeDot(format string) PostProcessor {
+ return func(input io.Reader, output io.Writer, ui plugin.UI) error {
+ cmd := exec.Command("dot", "-T"+format)
+ cmd.Stdin, cmd.Stdout, cmd.Stderr = input, output, os.Stderr
+ if err := cmd.Run(); err != nil {
+ return fmt.Errorf("failed to execute dot. Is Graphviz installed? Error: %v", err)
+ }
+ return nil
+ }
+}
+
+// massageDotSVG invokes the dot tool to generate an SVG image and alters
+// the image to have panning capabilities when viewed in a browser.
+// The full SVG is buffered in memory before massageSVG rewrites it.
+func massageDotSVG() PostProcessor {
+ generateSVG := invokeDot("svg")
+ return func(input io.Reader, output io.Writer, ui plugin.UI) error {
+ baseSVG := new(bytes.Buffer)
+ if err := generateSVG(input, baseSVG, ui); err != nil {
+ return err
+ }
+ _, err := output.Write([]byte(massageSVG(baseSVG.String())))
+ return err
+ }
+}
+
+// invokeVisualizer returns a PostProcessor that writes the report to a
+// temporary file with the given suffix and launches the first of the
+// visualizers commands that starts successfully. On non-interactive runs
+// it waits for the visualizer to exit; interactively it runs in the
+// background. In all cases it delays return by one second (via a defer)
+// so the visualizer can open the file before temp-file cleanup.
+func invokeVisualizer(suffix string, visualizers []string) PostProcessor {
+ return func(input io.Reader, output io.Writer, ui plugin.UI) error {
+ tempFile, err := newTempFile(os.TempDir(), "pprof", "."+suffix)
+ if err != nil {
+ return err
+ }
+ deferDeleteTempFile(tempFile.Name())
+ if _, err := io.Copy(tempFile, input); err != nil {
+ return err
+ }
+ tempFile.Close()
+ // Try visualizers until one is successful
+ for _, v := range visualizers {
+ // Separate command and arguments for exec.Command.
+ // NOTE(review): splitting on single spaces means individual
+ // arguments cannot themselves contain spaces; also
+ // strings.Split never returns an empty slice, so the
+ // len(args) == 0 guard below is unreachable.
+ args := strings.Split(v, " ")
+ if len(args) == 0 {
+ continue
+ }
+ viewer := exec.Command(args[0], append(args[1:], tempFile.Name())...)
+ viewer.Stderr = os.Stderr
+ if err = viewer.Start(); err == nil {
+ // Wait for a second so that the visualizer has a chance to
+ // open the input file. This needs to be done even if we're
+ // waiting for the visualizer as it can be just a wrapper that
+ // spawns a browser tab and returns right away.
+ defer func(t <-chan time.Time) {
+ <-t
+ }(time.After(time.Second))
+ // On interactive mode, let the visualizer run in the background
+ // so other commands can be issued.
+ if !interactiveMode {
+ return viewer.Wait()
+ }
+ return nil
+ }
+ }
+ return err
+ }
+}
+
+// stringToBool is a custom parser for bools. We avoid using strconv.ParseBool
+// to remain compatible with old pprof behavior (e.g., treating "" as true).
+// Matching is case-insensitive; unrecognized values produce an error.
+func stringToBool(s string) (bool, error) {
+ switch strings.ToLower(s) {
+ case "true", "t", "yes", "y", "1", "":
+ return true, nil
+ case "false", "f", "no", "n", "0":
+ return false, nil
+ default:
+ return false, fmt.Errorf(`illegal value "%s" for bool variable`, s)
+ }
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/driver/config.go b/src/cmd/vendor/github.com/google/pprof/internal/driver/config.go
new file mode 100644
index 0000000..9fcdd45
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/driver/config.go
@@ -0,0 +1,371 @@
+package driver
+
+import (
+ "fmt"
+ "net/url"
+ "reflect"
+ "strconv"
+ "strings"
+ "sync"
+)
+
+// config holds settings for a single named config.
+// The JSON tag name for a field is used both for JSON encoding and as
+// a named variable. Fields tagged `json:"-"` are transient: they are
+// never saved in settings (see resetTransient and the init in this file).
+type config struct {
+ // Filename for file-based output formats, stdout by default.
+ Output string `json:"-"`
+
+ // Display options.
+ CallTree bool `json:"call_tree,omitempty"`
+ RelativePercentages bool `json:"relative_percentages,omitempty"`
+ Unit string `json:"unit,omitempty"`
+ CompactLabels bool `json:"compact_labels,omitempty"`
+ SourcePath string `json:"-"`
+ TrimPath string `json:"-"`
+ IntelSyntax bool `json:"intel_syntax,omitempty"`
+ Mean bool `json:"mean,omitempty"`
+ SampleIndex string `json:"-"`
+ DivideBy float64 `json:"-"`
+ Normalize bool `json:"normalize,omitempty"`
+ Sort string `json:"sort,omitempty"` // one of "cum" or "flat"
+
+ // Label pseudo stack frame generation options
+ TagRoot string `json:"tagroot,omitempty"`
+ TagLeaf string `json:"tagleaf,omitempty"`
+
+ // Filtering options
+ DropNegative bool `json:"drop_negative,omitempty"`
+ NodeCount int `json:"nodecount,omitempty"`
+ NodeFraction float64 `json:"nodefraction,omitempty"`
+ EdgeFraction float64 `json:"edgefraction,omitempty"`
+ Trim bool `json:"trim,omitempty"`
+ Focus string `json:"focus,omitempty"`
+ Ignore string `json:"ignore,omitempty"`
+ PruneFrom string `json:"prune_from,omitempty"`
+ Hide string `json:"hide,omitempty"`
+ Show string `json:"show,omitempty"`
+ ShowFrom string `json:"show_from,omitempty"`
+ TagFocus string `json:"tagfocus,omitempty"`
+ TagIgnore string `json:"tagignore,omitempty"`
+ TagShow string `json:"tagshow,omitempty"`
+ TagHide string `json:"taghide,omitempty"`
+ NoInlines bool `json:"noinlines,omitempty"`
+
+ // Output granularity; one of "functions", "filefunctions", "files",
+ // "lines" or "addresses".
+ Granularity string `json:"granularity,omitempty"`
+}
+
+// defaultConfig returns the default configuration values; it is unaffected by
+// flags and interactive assignments.
+// NodeCount -1 means "use the report's heuristic default"; Unit "minimum"
+// presumably selects the smallest natural unit — confirm in the report pkg.
+func defaultConfig() config {
+ return config{
+ Unit: "minimum",
+ NodeCount: -1,
+ NodeFraction: 0.005,
+ EdgeFraction: 0.001,
+ Trim: true,
+ DivideBy: 1.0,
+ Sort: "flat",
+ Granularity: "functions",
+ }
+}
+
+// currentCfg holds the current configuration values; it is affected by
+// flags and interactive assignments. Guarded by currentMu.
+var currentCfg = defaultConfig()
+var currentMu sync.Mutex
+
+// currentConfig returns a copy of the current configuration under the lock.
+func currentConfig() config {
+ currentMu.Lock()
+ defer currentMu.Unlock()
+ return currentCfg
+}
+
+// setCurrentConfig replaces the current configuration under the lock.
+func setCurrentConfig(cfg config) {
+ currentMu.Lock()
+ defer currentMu.Unlock()
+ currentCfg = cfg
+}
+
+// configField contains metadata for a single configuration field.
+type configField struct {
+ name string // JSON field name/key in variables
+ urlparam string // URL parameter name
+ saved bool // Is field saved in settings?
+ field reflect.StructField // Field in config
+ choices []string // Names of the variables in the group (multi-choice fields only)
+ defaultValue string // Default value for this field.
+}
+
+var (
+ configFields []configField // Precomputed metadata per config field, populated by init
+
+ // configFieldMap holds an entry for every config field as well as an
+ // entry for every valid choice for a multi-choice field.
+ configFieldMap map[string]configField
+)
+
+// init precomputes configFields/configFieldMap by reflecting over the
+// config struct and merging in the notSaved, choices and urlparam tables.
+func init() {
+ // Config names for fields that are not saved in settings and therefore
+ // do not have a JSON name.
+ notSaved := map[string]string{
+ // Not saved in settings, but present in URLs.
+ "SampleIndex": "sample_index",
+
+ // Following fields are also not placed in URLs.
+ "Output": "output",
+ "SourcePath": "source_path",
+ "TrimPath": "trim_path",
+ "DivideBy": "divide_by",
+ }
+
+ // choices holds the list of allowed values for config fields that can
+ // take on one of a bounded set of values.
+ choices := map[string][]string{
+ "sort": {"cum", "flat"},
+ "granularity": {"functions", "filefunctions", "files", "lines", "addresses"},
+ }
+
+ // urlparam holds the mapping from a config field name to the URL
+ // parameter used to hold that config field. If no entry is present for
+ // a name, the corresponding field is not saved in URLs.
+ urlparam := map[string]string{
+ "drop_negative": "dropneg",
+ "call_tree": "calltree",
+ "relative_percentages": "rel",
+ "unit": "unit",
+ "compact_labels": "compact",
+ "intel_syntax": "intel",
+ "nodecount": "n",
+ "nodefraction": "nf",
+ "edgefraction": "ef",
+ "trim": "trim",
+ "focus": "f",
+ "ignore": "i",
+ "prune_from": "prunefrom",
+ "hide": "h",
+ "show": "s",
+ "show_from": "sf",
+ "tagfocus": "tf",
+ "tagignore": "ti",
+ "tagshow": "ts",
+ "taghide": "th",
+ "mean": "mean",
+ "sample_index": "si",
+ "normalize": "norm",
+ "sort": "sort",
+ "granularity": "g",
+ "noinlines": "noinlines",
+ }
+
+ def := defaultConfig()
+ configFieldMap = map[string]configField{}
+ t := reflect.TypeOf(config{})
+ for i, n := 0, t.NumField(); i < n; i++ {
+ field := t.Field(i)
+ js := strings.Split(field.Tag.Get("json"), ",")
+ // NOTE(review): strings.Split always returns at least one element,
+ // so this guard is unreachable; kept for safety.
+ if len(js) == 0 {
+ continue
+ }
+ // Get the configuration name for this field.
+ name := js[0]
+ if name == "-" {
+ name = notSaved[field.Name]
+ if name == "" {
+ // Not a configurable field.
+ continue
+ }
+ }
+ f := configField{
+ name: name,
+ urlparam: urlparam[name],
+ saved: (name == js[0]),
+ field: field,
+ choices: choices[name],
+ }
+ f.defaultValue = def.get(f)
+ configFields = append(configFields, f)
+ configFieldMap[f.name] = f
+ for _, choice := range f.choices {
+ configFieldMap[choice] = f
+ }
+ }
+}
+
+// fieldPtr returns a pointer to the field identified by f in *cfg.
+func (cfg *config) fieldPtr(f configField) interface{} {
+ // reflect.ValueOf: converts to reflect.Value
+ // Elem: dereferences the *config pointer to reach the config value
+ // FieldByIndex: fetches the field
+ // Addr: takes address of field
+ // Interface: converts back from reflect.Value to a regular value
+ return reflect.ValueOf(cfg).Elem().FieldByIndex(f.field.Index).Addr().Interface()
+}
+
+// get returns the value of field f in cfg, rendered as a string.
+// All config fields are string, int, float64, or bool; any other type
+// is a programming error and panics.
+func (cfg *config) get(f configField) string {
+ switch ptr := cfg.fieldPtr(f).(type) {
+ case *string:
+ return *ptr
+ case *int:
+ return fmt.Sprint(*ptr)
+ case *float64:
+ return fmt.Sprint(*ptr)
+ case *bool:
+ return fmt.Sprint(*ptr)
+ }
+ panic(fmt.Sprintf("unsupported config field type %v", f.field.Type))
+}
+
+// set sets the value of field f in cfg to value, parsing it according
+// to the field's Go type. Multi-choice string fields reject values
+// outside their allowed set; bools use the permissive stringToBool.
+func (cfg *config) set(f configField, value string) error {
+ switch ptr := cfg.fieldPtr(f).(type) {
+ case *string:
+ if len(f.choices) > 0 {
+ // Verify that value is one of the allowed choices.
+ for _, choice := range f.choices {
+ if choice == value {
+ *ptr = value
+ return nil
+ }
+ }
+ return fmt.Errorf("invalid %q value %q", f.name, value)
+ }
+ // Free-form string field.
+ *ptr = value
+ case *int:
+ v, err := strconv.Atoi(value)
+ if err != nil {
+ return err
+ }
+ *ptr = v
+ case *float64:
+ v, err := strconv.ParseFloat(value, 64)
+ if err != nil {
+ return err
+ }
+ *ptr = v
+ case *bool:
+ v, err := stringToBool(value)
+ if err != nil {
+ return err
+ }
+ *ptr = v
+ default:
+ panic(fmt.Sprintf("unsupported config field type %v", f.field.Type))
+ }
+ return nil
+}
+
+// isConfigurable returns true if name is either the name of a config field, or
+// a valid value for a multi-choice config field (both are keys in
+// configFieldMap).
+func isConfigurable(name string) bool {
+ _, ok := configFieldMap[name]
+ return ok
+}
+
+// isBoolConfig returns true if name is either name of a boolean config field,
+// or a valid value for a multi-choice config field (choices behave like
+// booleans: setting one to true selects it within its group).
+func isBoolConfig(name string) bool {
+ f, ok := configFieldMap[name]
+ if !ok {
+ return false
+ }
+ if name != f.name {
+ return true // name must be one possible value for the field
+ }
+ var cfg config
+ _, ok = cfg.fieldPtr(f).(*bool)
+ return ok
+}
+
+// completeConfig returns the list of configurable names starting with prefix.
+// The result order is nondeterministic (map iteration); callers sort if needed.
+func completeConfig(prefix string) []string {
+ var result []string
+ for v := range configFieldMap {
+ if strings.HasPrefix(v, prefix) {
+ result = append(result, v)
+ }
+ }
+ return result
+}
+
+// configure stores the name=value mapping into the current config, correctly
+// handling the case when name identifies a particular choice in a field.
+func configure(name, value string) error {
+ currentMu.Lock()
+ defer currentMu.Unlock()
+ f, ok := configFieldMap[name]
+ if !ok {
+ return fmt.Errorf("unknown config field %q", name)
+ }
+ if f.name == name {
+ return currentCfg.set(f, value)
+ }
+ // name must be one of the choices. If value is true, set field-value
+ // to name.
+ if v, err := strconv.ParseBool(value); v && err == nil {
+ return currentCfg.set(f, name)
+ }
+ // NOTE(review): this error is also returned when name IS a known
+ // choice but value is false/unparsable, so the message is misleading
+ // in that case.
+ return fmt.Errorf("unknown config field %q", name)
+}
+
+// resetTransient sets all transient fields in *cfg to their currently
+// configured values. Transient fields are the ones tagged `json:"-"`
+// in config (Output, SourcePath, TrimPath, DivideBy, SampleIndex).
+func (cfg *config) resetTransient() {
+ current := currentConfig()
+ cfg.Output = current.Output
+ cfg.SourcePath = current.SourcePath
+ cfg.TrimPath = current.TrimPath
+ cfg.DivideBy = current.DivideBy
+ cfg.SampleIndex = current.SampleIndex
+}
+
+// applyURL updates *cfg based on params. Fields without a URL parameter,
+// and parameters absent or empty in params, are left untouched (an empty
+// parameter therefore cannot reset a field).
+func (cfg *config) applyURL(params url.Values) error {
+ for _, f := range configFields {
+ var value string
+ if f.urlparam != "" {
+ value = params.Get(f.urlparam)
+ }
+ if value == "" {
+ continue
+ }
+ if err := cfg.set(f, value); err != nil {
+ return fmt.Errorf("error setting config field %s: %v", f.name, err)
+ }
+ }
+ return nil
+}
+
+// makeURL returns a URL based on initialURL that contains the config contents
+// as parameters. The second result is true iff a parameter value was changed.
+func (cfg *config) makeURL(initialURL url.URL) (url.URL, bool) {
+ q := initialURL.Query()
+ changed := false
+ for _, f := range configFields {
+ if f.urlparam == "" || !f.saved {
+ continue
+ }
+ v := cfg.get(f)
+ if v == f.defaultValue {
+ v = "" // The URL form of a default value is the empty string (parameter dropped).
+ } else if f.field.Type.Kind() == reflect.Bool {
+ // Shorten bool values to "f" or "t"
+ v = v[:1]
+ }
+ if q.Get(f.urlparam) == v {
+ continue
+ }
+ changed = true
+ if v == "" {
+ q.Del(f.urlparam)
+ } else {
+ q.Set(f.urlparam, v)
+ }
+ }
+ if changed {
+ initialURL.RawQuery = q.Encode()
+ }
+ return initialURL, changed
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/driver/driver.go b/src/cmd/vendor/github.com/google/pprof/internal/driver/driver.go
new file mode 100644
index 0000000..27681c5
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/driver/driver.go
@@ -0,0 +1,386 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package driver implements the core pprof functionality. It can be
+// parameterized with a flag implementation, fetch and symbolize
+// mechanisms.
+package driver
+
+import (
+ "bytes"
+ "fmt"
+ "os"
+ "path/filepath"
+ "regexp"
+ "strings"
+
+ "github.com/google/pprof/internal/plugin"
+ "github.com/google/pprof/internal/report"
+ "github.com/google/pprof/profile"
+)
+
+// PProf acquires a profile, and symbolizes it using a profile
+// manager. Then it generates a report formatted according to the
+// options selected through the flags package.
+func PProf(eo *plugin.Options) error {
+	// Remove any temporary files created during pprof processing.
+	defer cleanupTempFiles()
+
+	// Fill in any options the caller left unset.
+	o := setDefaults(eo)
+
+	src, cmd, err := parseFlags(o)
+	if err != nil {
+		return err
+	}
+
+	p, err := fetchProfiles(src, o)
+	if err != nil {
+		return err
+	}
+
+	// Dispatch on invocation mode: an explicit command produces a single
+	// report; an HTTP host:port serves the web UI; otherwise drop into
+	// the interactive shell.
+	if cmd != nil {
+		return generateReport(p, cmd, currentConfig(), o)
+	}
+
+	if src.HTTPHostport != "" {
+		return serveWebInterface(src.HTTPHostport, p, o, src.HTTPDisableBrowser)
+	}
+	return interactive(p, o)
+}
+
+// generateRawReport prepares a report for the given command and returns the
+// command descriptor together with the constructed report.
+// generateRawReport is allowed to modify p.
+func generateRawReport(p *profile.Profile, cmd []string, cfg config, o *plugin.Options) (*command, *report.Report, error) {
+	// Identify units of numeric tags in profile.
+	numLabelUnits := identifyNumLabelUnits(p, o.UI)
+
+	// Get report output format
+	c := pprofCommands[cmd[0]]
+	if c == nil {
+		// cmd[0] has been validated by the caller (command parsing),
+		// so a miss here is an internal inconsistency.
+		panic("unexpected nil command")
+	}
+
+	cfg = applyCommandOverrides(cmd[0], c.format, cfg)
+
+	// Create label pseudo nodes before filtering, in case the filters use
+	// the generated nodes.
+	generateTagRootsLeaves(p, cfg, o.UI)
+
+	// Delay focus after configuring report to get percentages on all samples.
+	relative := cfg.RelativePercentages
+	if relative {
+		if err := applyFocus(p, numLabelUnits, cfg, o.UI); err != nil {
+			return nil, nil, err
+		}
+	}
+	ropt, err := reportOptions(p, numLabelUnits, cfg)
+	if err != nil {
+		return nil, nil, err
+	}
+	ropt.OutputFormat = c.format
+	// An optional second argument is a symbol-selection regexp (e.g. for
+	// "list" or "disasm").
+	if len(cmd) == 2 {
+		s, err := regexp.Compile(cmd[1])
+		if err != nil {
+			return nil, nil, fmt.Errorf("parsing argument regexp %s: %v", cmd[1], err)
+		}
+		ropt.Symbol = s
+	}
+
+	rpt := report.New(p, ropt)
+	// In absolute mode, focus only after report.New has captured totals.
+	if !relative {
+		if err := applyFocus(p, numLabelUnits, cfg, o.UI); err != nil {
+			return nil, nil, err
+		}
+	}
+	if err := aggregate(p, cfg); err != nil {
+		return nil, nil, err
+	}
+
+	return c, rpt, nil
+}
+
+// generateReport builds the report for cmd and writes it either to the
+// configured output file, through the command's visualizer, or to stdout.
+// generateReport is allowed to modify p.
+func generateReport(p *profile.Profile, cmd []string, cfg config, o *plugin.Options) error {
+	c, rpt, err := generateRawReport(p, cmd, cfg, o)
+	if err != nil {
+		return err
+	}
+
+	// Generate the report.
+	dst := new(bytes.Buffer)
+	if err := report.Generate(dst, rpt, o.Obj); err != nil {
+		return err
+	}
+	src := dst
+
+	// If necessary, perform any data post-processing.
+	if c.postProcess != nil {
+		dst = new(bytes.Buffer)
+		if err := c.postProcess(src, dst, o.UI); err != nil {
+			return err
+		}
+		src = dst
+	}
+
+	// If no output is specified, use default visualizer.
+	output := cfg.Output
+	if output == "" {
+		if c.visualizer != nil {
+			return c.visualizer(src, os.Stdout, o.UI)
+		}
+		_, err := src.WriteTo(os.Stdout)
+		return err
+	}
+
+	// Output to specified file.
+	o.UI.PrintErr("Generating report in ", output)
+	out, err := o.Writer.Open(output)
+	if err != nil {
+		return err
+	}
+	if _, err := src.WriteTo(out); err != nil {
+		// Close explicitly (not deferred) so the write error, not the
+		// close error, is the one reported.
+		out.Close()
+		return err
+	}
+	return out.Close()
+}
+
+// applyCommandOverrides adjusts cfg to match the requirements of the report
+// command cmd and its output format, returning the modified copy.
+func applyCommandOverrides(cmd string, outputFormat int, cfg config) config {
+	// Some report types override the trim flag to false below. This is to make
+	// sure the default heuristics of excluding insignificant nodes and edges
+	// from the call graph do not apply. One example where it is important is
+	// annotated source or disassembly listing. Those reports run on a specific
+	// function (or functions), but the trimming is applied before the function
+	// data is selected. So, with trimming enabled, the report could end up
+	// showing no data if the specified function is "uninteresting" as far as the
+	// trimming is concerned.
+	trim := cfg.Trim
+
+	switch cmd {
+	case "disasm":
+		trim = false
+		cfg.Granularity = "addresses"
+		// Force the 'noinlines' mode so that source locations for a given address
+		// collapse and there is only one for the given address. Without this
+		// cumulative metrics would be double-counted when annotating the assembly.
+		// This is because the merge is done by address and in case of an inlined
+		// stack each of the inlined entries is a separate callgraph node.
+		cfg.NoInlines = true
+	case "weblist":
+		trim = false
+		cfg.Granularity = "addresses"
+		cfg.NoInlines = false // Need inline info to support call expansion
+	case "peek":
+		trim = false
+	case "list":
+		trim = false
+		cfg.Granularity = "lines"
+		// Do not force 'noinlines' to be false so that specifying
+		// "-list foo -noinlines" is supported and works as expected.
+	case "text", "top", "topproto":
+		// NodeCount == -1 means "unset"; textual reports default to
+		// unlimited nodes.
+		if cfg.NodeCount == -1 {
+			cfg.NodeCount = 0
+		}
+	default:
+		// Graphical reports default to a bounded node count.
+		if cfg.NodeCount == -1 {
+			cfg.NodeCount = 80
+		}
+	}
+
+	// Machine-consumed formats need full, untrimmed address-level data.
+	switch outputFormat {
+	case report.Proto, report.Raw, report.Callgrind:
+		trim = false
+		cfg.Granularity = "addresses"
+	}
+
+	// Trimming disabled: zero out all trimming thresholds.
+	if !trim {
+		cfg.NodeCount = 0
+		cfg.NodeFraction = 0
+		cfg.EdgeFraction = 0
+	}
+	return cfg
+}
+
+// generateTagRootsLeaves generates extra nodes from the tagroot and tagleaf options.
+// It warns through ui when a requested label key matched nothing.
+func generateTagRootsLeaves(prof *profile.Profile, cfg config, ui plugin.UI) {
+	rootKeys := dropEmptyStrings(strings.Split(cfg.TagRoot, ","))
+	leafKeys := dropEmptyStrings(strings.Split(cfg.TagLeaf, ","))
+	rootMatched, leafMatched := addLabelNodes(prof, rootKeys, leafKeys, cfg.Unit)
+	warnNoMatches(cfg.TagRoot == "" || rootMatched, "TagRoot", ui)
+	warnNoMatches(cfg.TagLeaf == "" || leafMatched, "TagLeaf", ui)
+}
+
+// dropEmptyStrings filters a slice to only non-empty strings
+func dropEmptyStrings(in []string) (out []string) {
+	for _, str := range in {
+		if len(str) > 0 {
+			out = append(out, str)
+		}
+	}
+	return out
+}
+
+// aggregate merges profile samples to the level of detail requested by
+// cfg.Granularity, optionally collapsing inlined frames (cfg.NoInlines).
+// Unknown granularities produce an error.
+func aggregate(prof *profile.Profile, cfg config) error {
+	keepInlines := !cfg.NoInlines
+	var byFunction, byFilename, byLine, byAddress bool
+	switch cfg.Granularity {
+	case "addresses":
+		if keepInlines {
+			// Full detail requested: nothing to aggregate away.
+			return nil
+		}
+		byFunction, byFilename, byLine, byAddress = true, true, true, true
+	case "lines":
+		byFunction, byFilename, byLine = true, true, true
+	case "files":
+		byFilename = true
+	case "functions":
+		byFunction = true
+	case "filefunctions":
+		byFunction, byFilename = true, true
+	default:
+		return fmt.Errorf("unexpected granularity")
+	}
+	return prof.Aggregate(keepInlines, byFunction, byFilename, byLine, byAddress)
+}
+
+// reportOptions builds the report.Options for p from the current config,
+// including the active filter list and sample value extraction functions.
+func reportOptions(p *profile.Profile, numLabelUnits map[string]string, cfg config) (*report.Options, error) {
+	si, mean := cfg.SampleIndex, cfg.Mean
+	value, meanDiv, sample, err := sampleFormat(p, si, mean)
+	if err != nil {
+		return nil, err
+	}
+
+	stype := sample.Type
+	if mean {
+		stype = "mean_" + stype
+	}
+
+	// DivideBy is used as a divisor below (Ratio: 1 / cfg.DivideBy);
+	// reject zero explicitly rather than producing +Inf.
+	if cfg.DivideBy == 0 {
+		return nil, fmt.Errorf("zero divisor specified")
+	}
+
+	// Collect the non-empty filters as "name=value" strings for display.
+	var filters []string
+	addFilter := func(k string, v string) {
+		if v != "" {
+			filters = append(filters, k+"="+v)
+		}
+	}
+	addFilter("focus", cfg.Focus)
+	addFilter("ignore", cfg.Ignore)
+	addFilter("hide", cfg.Hide)
+	addFilter("show", cfg.Show)
+	addFilter("show_from", cfg.ShowFrom)
+	addFilter("tagfocus", cfg.TagFocus)
+	addFilter("tagignore", cfg.TagIgnore)
+	addFilter("tagshow", cfg.TagShow)
+	addFilter("taghide", cfg.TagHide)
+
+	ropt := &report.Options{
+		CumSort:      cfg.Sort == "cum",
+		CallTree:     cfg.CallTree,
+		DropNegative: cfg.DropNegative,
+
+		CompactLabels: cfg.CompactLabels,
+		Ratio:         1 / cfg.DivideBy,
+
+		NodeCount:    cfg.NodeCount,
+		NodeFraction: cfg.NodeFraction,
+		EdgeFraction: cfg.EdgeFraction,
+
+		ActiveFilters: filters,
+		NumLabelUnits: numLabelUnits,
+
+		SampleValue:       value,
+		SampleMeanDivisor: meanDiv,
+		SampleType:        stype,
+		SampleUnit:        sample.Unit,
+
+		OutputUnit: cfg.Unit,
+
+		SourcePath: cfg.SourcePath,
+		TrimPath:   cfg.TrimPath,
+
+		IntelSyntax: cfg.IntelSyntax,
+	}
+
+	// Default the report title to the main binary's name when available.
+	if len(p.Mapping) > 0 && p.Mapping[0].File != "" {
+		ropt.Title = filepath.Base(p.Mapping[0].File)
+	}
+
+	return ropt, nil
+}
+
+// identifyNumLabelUnits returns a map of numeric label keys to the units
+// associated with those keys.
+func identifyNumLabelUnits(p *profile.Profile, ui plugin.UI) map[string]string {
+	numLabelUnits, ignoredUnits := p.NumLabelUnits()
+
+	// Print errors for tags with multiple units associated with
+	// a single key.
+	for k, units := range ignoredUnits {
+		ui.PrintErr(fmt.Sprintf("For tag %s used unit %s, also encountered unit(s) %s", k, numLabelUnits[k], strings.Join(units, ", ")))
+	}
+	return numLabelUnits
+}
+
+// sampleValueFunc extracts one value from a sample's value slice.
+type sampleValueFunc func([]int64) int64
+
+// sampleFormat returns a function to extract values out of a profile.Sample,
+// and the type/units of those values.
+func sampleFormat(p *profile.Profile, sampleIndex string, mean bool) (value, meanDiv sampleValueFunc, v *profile.ValueType, err error) {
+	if len(p.SampleType) == 0 {
+		return nil, nil, nil, fmt.Errorf("profile has no samples")
+	}
+	index, err := p.SampleIndexByName(sampleIndex)
+	if err != nil {
+		return nil, nil, nil, err
+	}
+	value = valueExtractor(index)
+	if mean {
+		// Mean reports divide by the value at index 0 — presumably the
+		// sample count; verify against the profile format if changing.
+		meanDiv = valueExtractor(0)
+	}
+	v = p.SampleType[index]
+	return
+}
+
+// valueExtractor returns a sampleValueFunc that reads the value stored at
+// index ix of a sample's value slice.
+func valueExtractor(ix int) sampleValueFunc {
+	return func(values []int64) int64 {
+		return values[ix]
+	}
+}
+
+// profileCopier can be used to obtain a fresh copy of a profile.
+// It is useful since reporting code may mutate the profile handed to it.
+type profileCopier []byte
+
+// makeProfileCopier snapshots src in serialized (uncompressed proto) form.
+func makeProfileCopier(src *profile.Profile) profileCopier {
+	// Pre-serialize the profile. We will deserialize every time a fresh copy is needed.
+	// NOTE(review): WriteUncompressed's error is ignored here; a failure
+	// would surface later as a panic in newCopy — confirm this is acceptable.
+	var buf bytes.Buffer
+	src.WriteUncompressed(&buf)
+	return profileCopier(buf.Bytes())
+}
+
+// newCopy returns a new copy of the profile.
+func (c profileCopier) newCopy() *profile.Profile {
+	p, err := profile.ParseUncompressed([]byte(c))
+	if err != nil {
+		// The bytes were produced by makeProfileCopier, so a parse
+		// failure indicates an internal bug rather than bad user input.
+		panic(err)
+	}
+	return p
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/driver/driver_focus.go b/src/cmd/vendor/github.com/google/pprof/internal/driver/driver_focus.go
new file mode 100644
index 0000000..fd05adb
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/driver/driver_focus.go
@@ -0,0 +1,219 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package driver
+
+import (
+ "fmt"
+ "regexp"
+ "strconv"
+ "strings"
+
+ "github.com/google/pprof/internal/measurement"
+ "github.com/google/pprof/internal/plugin"
+ "github.com/google/pprof/profile"
+)
+
+// tagFilterRangeRx matches a signed integer optionally followed by an
+// alphabetic unit suffix (e.g. "32kb", "-4"), used when parsing tag
+// filter ranges.
+var tagFilterRangeRx = regexp.MustCompile("([+-]?[[:digit:]]+)([[:alpha:]]+)?")
+
+// applyFocus filters samples based on the focus/ignore options
+func applyFocus(prof *profile.Profile, numLabelUnits map[string]string, cfg config, ui plugin.UI) error {
+	// Each compile helper is a no-op when handed a non-nil err, so the
+	// whole chain can be checked with a single error test below.
+	focus, err := compileRegexOption("focus", cfg.Focus, nil)
+	ignore, err := compileRegexOption("ignore", cfg.Ignore, err)
+	hide, err := compileRegexOption("hide", cfg.Hide, err)
+	show, err := compileRegexOption("show", cfg.Show, err)
+	showfrom, err := compileRegexOption("show_from", cfg.ShowFrom, err)
+	tagfocus, err := compileTagFilter("tagfocus", cfg.TagFocus, numLabelUnits, ui, err)
+	tagignore, err := compileTagFilter("tagignore", cfg.TagIgnore, numLabelUnits, ui, err)
+	prunefrom, err := compileRegexOption("prune_from", cfg.PruneFrom, err)
+	if err != nil {
+		return err
+	}
+
+	fm, im, hm, hnm := prof.FilterSamplesByName(focus, ignore, hide, show)
+	warnNoMatches(focus == nil || fm, "Focus", ui)
+	warnNoMatches(ignore == nil || im, "Ignore", ui)
+	warnNoMatches(hide == nil || hm, "Hide", ui)
+	warnNoMatches(show == nil || hnm, "Show", ui)
+
+	sfm := prof.ShowFrom(showfrom)
+	warnNoMatches(showfrom == nil || sfm, "ShowFrom", ui)
+
+	tfm, tim := prof.FilterSamplesByTag(tagfocus, tagignore)
+	warnNoMatches(tagfocus == nil || tfm, "TagFocus", ui)
+	warnNoMatches(tagignore == nil || tim, "TagIgnore", ui)
+
+	// NOTE(review): tagshow/taghide compile errors are only returned at the
+	// end, after FilterTagsByName has already run with nil filters — confirm
+	// this ordering is intentional.
+	tagshow, err := compileRegexOption("tagshow", cfg.TagShow, err)
+	taghide, err := compileRegexOption("taghide", cfg.TagHide, err)
+	tns, tnh := prof.FilterTagsByName(tagshow, taghide)
+	warnNoMatches(tagshow == nil || tns, "TagShow", ui)
+	warnNoMatches(taghide == nil || tnh, "TagHide", ui)
+
+	if prunefrom != nil {
+		prof.PruneFrom(prunefrom)
+	}
+	return err
+}
+
+// compileRegexOption compiles value as a regexp. It is a pass-through
+// no-op (returning nil, err) when value is empty or when a previous step
+// already produced err, which lets callers chain several compilations and
+// check for errors once at the end.
+func compileRegexOption(name, value string, err error) (*regexp.Regexp, error) {
+	if err != nil || value == "" {
+		return nil, err
+	}
+	compiled, cerr := regexp.Compile(value)
+	if cerr != nil {
+		return nil, fmt.Errorf("parsing %s regexp: %v", name, cerr)
+	}
+	return compiled, nil
+}
+
+// compileTagFilter builds a sample predicate from a tag filter expression.
+// The expression may be "key=value" (restricting the match to one label key)
+// or a bare value. A value parseable as a numeric range filters numeric
+// labels; otherwise it is a comma-separated list of regexps that must all
+// match string labels. Like compileRegexOption, it is a no-op when value is
+// empty or err is already non-nil.
+func compileTagFilter(name, value string, numLabelUnits map[string]string, ui plugin.UI, err error) (func(*profile.Sample) bool, error) {
+	if value == "" || err != nil {
+		return nil, err
+	}
+
+	// Split an optional "key=" prefix off the value.
+	tagValuePair := strings.SplitN(value, "=", 2)
+	var wantKey string
+	if len(tagValuePair) == 2 {
+		wantKey = tagValuePair[0]
+		value = tagValuePair[1]
+	}
+
+	// Numeric-range interpretation takes precedence over regexps.
+	if numFilter := parseTagFilterRange(value); numFilter != nil {
+		ui.PrintErr(name, ":Interpreted '", value, "' as range, not regexp")
+		// labelFilter reports whether any of vals passes the range check.
+		labelFilter := func(vals []int64, unit string) bool {
+			for _, val := range vals {
+				if numFilter(val, unit) {
+					return true
+				}
+			}
+			return false
+		}
+		numLabelUnit := func(key string) string {
+			return numLabelUnits[key]
+		}
+		if wantKey == "" {
+			// No key restriction: match if any numeric label passes.
+			return func(s *profile.Sample) bool {
+				for key, vals := range s.NumLabel {
+					if labelFilter(vals, numLabelUnit(key)) {
+						return true
+					}
+				}
+				return false
+			}, nil
+		}
+		// Key restriction: only the wantKey numeric label is consulted.
+		return func(s *profile.Sample) bool {
+			if vals, ok := s.NumLabel[wantKey]; ok {
+				return labelFilter(vals, numLabelUnit(wantKey))
+			}
+			return false
+		}, nil
+	}
+
+	// Regexp interpretation: every comma-separated regexp must match.
+	var rfx []*regexp.Regexp
+	for _, tagf := range strings.Split(value, ",") {
+		fx, err := regexp.Compile(tagf)
+		if err != nil {
+			return nil, fmt.Errorf("parsing %s regexp: %v", name, err)
+		}
+		rfx = append(rfx, fx)
+	}
+	if wantKey == "" {
+		// All regexps must match some "key:value" string label.
+		return func(s *profile.Sample) bool {
+		matchedrx:
+			for _, rx := range rfx {
+				for key, vals := range s.Label {
+					for _, val := range vals {
+						// TODO: Match against val, not key:val in future
+						if rx.MatchString(key + ":" + val) {
+							continue matchedrx
+						}
+					}
+				}
+				return false
+			}
+			return true
+		}, nil
+	}
+	// Key restriction: any regexp matching any value of wantKey suffices.
+	return func(s *profile.Sample) bool {
+		if vals, ok := s.Label[wantKey]; ok {
+			for _, rx := range rfx {
+				for _, val := range vals {
+					if rx.MatchString(val) {
+						return true
+					}
+				}
+			}
+		}
+		return false
+	}, nil
+}
+
+// parseTagFilterRange returns a function to checks if a value is
+// contained on the range described by a string. It can recognize
+// strings of the form:
+// "32kb" -- matches values == 32kb
+// ":64kb" -- matches values <= 64kb
+// "4mb:" -- matches values >= 4mb
+// "12kb:64mb" -- matches values between 12kb and 64mb (both included).
+// It returns nil if the string does not describe a recognizable range.
+func parseTagFilterRange(filter string) func(int64, string) bool {
+	// At most two number+unit groups are relevant (lower and upper bound).
+	ranges := tagFilterRangeRx.FindAllStringSubmatch(filter, 2)
+	if len(ranges) == 0 {
+		return nil // No ranges were identified
+	}
+	v, err := strconv.ParseInt(ranges[0][1], 10, 64)
+	if err != nil {
+		// The regexp only matches digit runs, so this should be
+		// unreachable except for overflow.
+		panic(fmt.Errorf("failed to parse int %s: %v", ranges[0][1], err))
+	}
+	scaledValue, unit := measurement.Scale(v, ranges[0][2], ranges[0][2])
+	if len(ranges) == 1 {
+		// Single bound: decide ==, >= or <= by where the colon sits
+		// relative to the matched text.
+		switch match := ranges[0][0]; filter {
+		case match:
+			return func(v int64, u string) bool {
+				sv, su := measurement.Scale(v, u, unit)
+				return su == unit && sv == scaledValue
+			}
+		case match + ":":
+			return func(v int64, u string) bool {
+				sv, su := measurement.Scale(v, u, unit)
+				return su == unit && sv >= scaledValue
+			}
+		case ":" + match:
+			return func(v int64, u string) bool {
+				sv, su := measurement.Scale(v, u, unit)
+				return su == unit && sv <= scaledValue
+			}
+		}
+		return nil
+	}
+	// Two bounds: the filter must be exactly "lower:upper".
+	if filter != ranges[0][0]+":"+ranges[1][0] {
+		return nil
+	}
+	if v, err = strconv.ParseInt(ranges[1][1], 10, 64); err != nil {
+		panic(fmt.Errorf("failed to parse int %s: %v", ranges[1][1], err))
+	}
+	// Both bounds must scale to the same unit to be comparable.
+	scaledValue2, unit2 := measurement.Scale(v, ranges[1][2], unit)
+	if unit != unit2 {
+		return nil
+	}
+	return func(v int64, u string) bool {
+		sv, su := measurement.Scale(v, u, unit)
+		return su == unit && sv >= scaledValue && sv <= scaledValue2
+	}
+}
+
+// warnNoMatches prints a warning through ui when a filter option matched
+// no samples (i.e. match is false).
+func warnNoMatches(match bool, option string, ui plugin.UI) {
+	if match {
+		return
+	}
+	ui.PrintErr(option + " expression matched no samples")
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/driver/fetch.go b/src/cmd/vendor/github.com/google/pprof/internal/driver/fetch.go
new file mode 100644
index 0000000..584c5d8
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/driver/fetch.go
@@ -0,0 +1,616 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package driver
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "net/http"
+ "net/url"
+ "os"
+ "os/exec"
+ "path/filepath"
+ "runtime"
+ "strconv"
+ "strings"
+ "sync"
+ "time"
+
+ "github.com/google/pprof/internal/measurement"
+ "github.com/google/pprof/internal/plugin"
+ "github.com/google/pprof/profile"
+)
+
+// fetchProfiles fetches and symbolizes the profiles specified by s.
+// It will merge all the profiles it is able to retrieve, even if
+// there are some failures. It will return an error if it is unable to
+// fetch any profiles.
+func fetchProfiles(s *source, o *plugin.Options) (*profile.Profile, error) {
+	sources := make([]profileSource, 0, len(s.Sources))
+	for _, src := range s.Sources {
+		sources = append(sources, profileSource{
+			addr:   src,
+			source: s,
+		})
+	}
+
+	bases := make([]profileSource, 0, len(s.Base))
+	for _, src := range s.Base {
+		bases = append(bases, profileSource{
+			addr:   src,
+			source: s,
+		})
+	}
+
+	p, pbase, m, mbase, save, err := grabSourcesAndBases(sources, bases, o.Fetch, o.Obj, o.UI, o.HTTPTransport)
+	if err != nil {
+		return nil, err
+	}
+
+	// A base profile subtracts from the main profile (diff mode).
+	if pbase != nil {
+		if s.DiffBase {
+			pbase.SetLabel("pprof::base", []string{"true"})
+		}
+		if s.Normalize {
+			err := p.Normalize(pbase)
+			if err != nil {
+				return nil, err
+			}
+		}
+		// Negate the base so that merging computes the difference.
+		pbase.Scale(-1)
+		p, m, err = combineProfiles([]*profile.Profile{p, pbase}, []plugin.MappingSources{m, mbase})
+		if err != nil {
+			return nil, err
+		}
+	}
+
+	// Symbolize the merged profile.
+	if err := o.Sym.Symbolize(s.Symbolize, m, p); err != nil {
+		return nil, err
+	}
+	p.RemoveUninteresting()
+	unsourceMappings(p)
+
+	if s.Comment != "" {
+		p.Comments = append(p.Comments, s.Comment)
+	}
+
+	// Save a copy of the merged profile if there is at least one remote source.
+	if save {
+		dir, err := setTmpDir(o.UI)
+		if err != nil {
+			return nil, err
+		}
+
+		// Build a descriptive file name: pprof.<binary>.<sample types>.
+		prefix := "pprof."
+		if len(p.Mapping) > 0 && p.Mapping[0].File != "" {
+			prefix += filepath.Base(p.Mapping[0].File) + "."
+		}
+		for _, s := range p.SampleType {
+			prefix += s.Type + "."
+		}
+
+		tempFile, err := newTempFile(dir, prefix, ".pb.gz")
+		if err == nil {
+			if err = p.Write(tempFile); err == nil {
+				o.UI.PrintErr("Saved profile in ", tempFile.Name())
+			}
+		}
+		// A failed save is reported but does not abort processing.
+		if err != nil {
+			o.UI.PrintErr("Could not save profile: ", err)
+		}
+	}
+
+	if err := p.CheckValid(); err != nil {
+		return nil, err
+	}
+
+	return p, nil
+}
+
+// grabSourcesAndBases fetches the source and base profile sets in parallel
+// and returns the merged source profile, merged base profile (nil if no
+// bases), their mapping sources, and whether any profile came from a remote
+// source (and therefore should be saved).
+func grabSourcesAndBases(sources, bases []profileSource, fetch plugin.Fetcher, obj plugin.ObjTool, ui plugin.UI, tr http.RoundTripper) (*profile.Profile, *profile.Profile, plugin.MappingSources, plugin.MappingSources, bool, error) {
+	wg := sync.WaitGroup{}
+	wg.Add(2)
+	var psrc, pbase *profile.Profile
+	var msrc, mbase plugin.MappingSources
+	var savesrc, savebase bool
+	var errsrc, errbase error
+	var countsrc, countbase int
+	// Fetch sources and bases concurrently; each goroutine writes only its
+	// own set of result variables.
+	go func() {
+		defer wg.Done()
+		psrc, msrc, savesrc, countsrc, errsrc = chunkedGrab(sources, fetch, obj, ui, tr)
+	}()
+	go func() {
+		defer wg.Done()
+		pbase, mbase, savebase, countbase, errbase = chunkedGrab(bases, fetch, obj, ui, tr)
+	}()
+	wg.Wait()
+	save := savesrc || savebase
+
+	if errsrc != nil {
+		return nil, nil, nil, nil, false, fmt.Errorf("problem fetching source profiles: %v", errsrc)
+	}
+	if errbase != nil {
+		// Fixed: format string previously ended with a stray comma ("%v,").
+		return nil, nil, nil, nil, false, fmt.Errorf("problem fetching base profiles: %v", errbase)
+	}
+	// Partial failures are tolerated, but at least one profile of each
+	// requested kind must have been fetched.
+	if countsrc == 0 {
+		return nil, nil, nil, nil, false, fmt.Errorf("failed to fetch any source profiles")
+	}
+	if countbase == 0 && len(bases) > 0 {
+		return nil, nil, nil, nil, false, fmt.Errorf("failed to fetch any base profiles")
+	}
+	if want, got := len(sources), countsrc; want != got {
+		ui.PrintErr(fmt.Sprintf("Fetched %d source profiles out of %d", got, want))
+	}
+	if want, got := len(bases), countbase; want != got {
+		ui.PrintErr(fmt.Sprintf("Fetched %d base profiles out of %d", got, want))
+	}
+
+	return psrc, pbase, msrc, mbase, save, nil
+}
+
+// chunkedGrab fetches the profiles described in source and merges them into
+// a single profile. It fetches a chunk of profiles concurrently, with a maximum
+// chunk size to limit its memory usage.
+func chunkedGrab(sources []profileSource, fetch plugin.Fetcher, obj plugin.ObjTool, ui plugin.UI, tr http.RoundTripper) (*profile.Profile, plugin.MappingSources, bool, int, error) {
+	const chunkSize = 128
+
+	var p *profile.Profile
+	var msrc plugin.MappingSources
+	var save bool
+	var count int
+
+	for start := 0; start < len(sources); start += chunkSize {
+		end := start + chunkSize
+		if end > len(sources) {
+			end = len(sources)
+		}
+		chunkP, chunkMsrc, chunkSave, chunkCount, chunkErr := concurrentGrab(sources[start:end], fetch, obj, ui, tr)
+		switch {
+		case chunkErr != nil:
+			return nil, nil, false, 0, chunkErr
+		case chunkP == nil:
+			// Entire chunk failed to fetch; individual errors were
+			// already reported by concurrentGrab.
+			continue
+		case p == nil:
+			// First successful chunk seeds the accumulators.
+			p, msrc, save, count = chunkP, chunkMsrc, chunkSave, chunkCount
+		default:
+			// Merge this chunk into the running result.
+			p, msrc, chunkErr = combineProfiles([]*profile.Profile{p, chunkP}, []plugin.MappingSources{msrc, chunkMsrc})
+			if chunkErr != nil {
+				return nil, nil, false, 0, chunkErr
+			}
+			if chunkSave {
+				save = true
+			}
+			count += chunkCount
+		}
+	}
+
+	return p, msrc, save, count, nil
+}
+
+// concurrentGrab fetches multiple profiles concurrently
+// It returns the merged profile, the combined mapping sources, whether any
+// profile was remote, and the number of profiles successfully fetched.
+func concurrentGrab(sources []profileSource, fetch plugin.Fetcher, obj plugin.ObjTool, ui plugin.UI, tr http.RoundTripper) (*profile.Profile, plugin.MappingSources, bool, int, error) {
+	wg := sync.WaitGroup{}
+	wg.Add(len(sources))
+	for i := range sources {
+		// Each goroutine writes only to its own element of sources.
+		go func(s *profileSource) {
+			defer wg.Done()
+			s.p, s.msrc, s.remote, s.err = grabProfile(s.source, s.addr, fetch, obj, ui, tr)
+		}(&sources[i])
+	}
+	wg.Wait()
+
+	var save bool
+	profiles := make([]*profile.Profile, 0, len(sources))
+	msrcs := make([]plugin.MappingSources, 0, len(sources))
+	for i := range sources {
+		s := &sources[i]
+		// Individual fetch failures are reported but do not abort the batch.
+		if err := s.err; err != nil {
+			ui.PrintErr(s.addr + ": " + err.Error())
+			continue
+		}
+		save = save || s.remote
+		profiles = append(profiles, s.p)
+		msrcs = append(msrcs, s.msrc)
+		// Zero the slot, presumably to release the fetched data early;
+		// the collected slices above retain what is needed.
+		*s = profileSource{}
+	}
+
+	if len(profiles) == 0 {
+		return nil, nil, false, 0, nil
+	}
+
+	p, msrc, err := combineProfiles(profiles, msrcs)
+	if err != nil {
+		return nil, nil, false, 0, err
+	}
+	return p, msrc, save, len(profiles), nil
+}
+
+// combineProfiles merges a set of profiles and their mapping sources into
+// one profile and one mapping-source map, normalizing sample types and
+// units first so the merge can succeed.
+func combineProfiles(profiles []*profile.Profile, msrcs []plugin.MappingSources) (*profile.Profile, plugin.MappingSources, error) {
+	// Merge profiles.
+	//
+	// The merge call below only treats exactly matching sample type lists as
+	// compatible and will fail otherwise. Make the profiles' sample types
+	// compatible for the merge, see CompatibilizeSampleTypes() doc for details.
+	if err := profile.CompatibilizeSampleTypes(profiles); err != nil {
+		return nil, nil, err
+	}
+	if err := measurement.ScaleProfiles(profiles); err != nil {
+		return nil, nil, err
+	}
+
+	// Avoid expensive work for the common case of a single profile/src.
+	if len(profiles) == 1 && len(msrcs) == 1 {
+		return profiles[0], msrcs[0], nil
+	}
+
+	p, err := profile.Merge(profiles)
+	if err != nil {
+		return nil, nil, err
+	}
+
+	// Combine mapping sources.
+	msrc := make(plugin.MappingSources)
+	for _, ms := range msrcs {
+		for m, s := range ms {
+			msrc[m] = append(msrc[m], s...)
+		}
+	}
+	return p, msrc, nil
+}
+
+// profileSource tracks a single profile fetch: the input address/options
+// and the outcome produced by grabProfile.
+type profileSource struct {
+	addr   string  // file path or URL to fetch the profile from
+	source *source // shared fetch options (seconds, timeout, exec name, ...)
+
+	p      *profile.Profile      // fetched profile; nil when err is set
+	msrc   plugin.MappingSources // source URLs for the profile's mappings
+	remote bool                  // true when fetched from a remote source
+	err    error                 // fetch failure, if any
+}
+
+// homeEnv returns the name of the environment variable that holds the
+// user's home directory on the current operating system.
+func homeEnv() string {
+	if runtime.GOOS == "windows" {
+		return "USERPROFILE"
+	}
+	if runtime.GOOS == "plan9" {
+		return "home"
+	}
+	return "HOME"
+}
+
+// setTmpDir prepares the directory to use to save profiles retrieved
+// remotely. It is selected from PPROF_TMPDIR, defaults to $HOME/pprof, and, if
+// $HOME is not set, falls back to os.TempDir().
+func setTmpDir(ui plugin.UI) (string, error) {
+	// Build the candidate list in priority order.
+	var dirs []string
+	if profileDir := os.Getenv("PPROF_TMPDIR"); profileDir != "" {
+		dirs = append(dirs, profileDir)
+	}
+	if homeDir := os.Getenv(homeEnv()); homeDir != "" {
+		dirs = append(dirs, filepath.Join(homeDir, "pprof"))
+	}
+	dirs = append(dirs, os.TempDir())
+	// Use the first candidate that can be created (or already exists).
+	for _, tmpDir := range dirs {
+		if err := os.MkdirAll(tmpDir, 0755); err != nil {
+			ui.PrintErr("Could not use temp dir ", tmpDir, ": ", err.Error())
+			continue
+		}
+		return tmpDir, nil
+	}
+	return "", fmt.Errorf("failed to identify temp dir")
+}
+
+// testSourceAddress marks profiles fetched during driver tests so they are
+// not treated as remote (and therefore not saved).
+const testSourceAddress = "pproftest.local"
+
+// grabProfile fetches a profile. Returns the profile, sources for the
+// profile mappings, a bool indicating if the profile was fetched
+// remotely, and an error.
+func grabProfile(s *source, source string, fetcher plugin.Fetcher, obj plugin.ObjTool, ui plugin.UI, tr http.RoundTripper) (p *profile.Profile, msrc plugin.MappingSources, remote bool, err error) {
+	var src string
+	duration, timeout := time.Duration(s.Seconds)*time.Second, time.Duration(s.Timeout)*time.Second
+	// Give a custom fetcher first chance at the source.
+	if fetcher != nil {
+		p, src, err = fetcher.Fetch(source, duration, timeout)
+		if err != nil {
+			return
+		}
+	}
+	// NOTE(review): err is always nil at this point (non-nil returned
+	// above), so this condition effectively tests p == nil — i.e. no
+	// fetcher was supplied or the fetcher declined the source.
+	if err != nil || p == nil {
+		// Fetch the profile over HTTP or from a file.
+		p, src, err = fetch(source, duration, timeout, ui, tr)
+		if err != nil {
+			return
+		}
+	}
+
+	if err = p.CheckValid(); err != nil {
+		return
+	}
+
+	// Update the binary locations from command line and paths.
+	locateBinaries(p, s, obj, ui)
+
+	// Collect the source URL for all mappings.
+	if src != "" {
+		msrc = collectMappingSources(p, src)
+		remote = true
+		if strings.HasPrefix(src, "http://"+testSourceAddress) {
+			// Treat test inputs as local to avoid saving
+			// testcase profiles during driver testing.
+			remote = false
+		}
+	}
+	return
+}
+
+// collectMappingSources saves the mapping sources of a profile.
+// Each mapping is keyed by build ID, falling back to file name, falling
+// back to the profile source itself.
+func collectMappingSources(p *profile.Profile, source string) plugin.MappingSources {
+	ms := plugin.MappingSources{}
+	for _, m := range p.Mapping {
+		src := struct {
+			Source string
+			Start  uint64
+		}{
+			source, m.Start,
+		}
+		key := m.BuildID
+		if key == "" {
+			key = m.File
+		}
+		if key == "" {
+			// If there is no build id or source file, use the source as the
+			// mapping file. This will enable remote symbolization for this
+			// mapping, in particular for Go profiles on the legacy format.
+			// The source is reset back to empty string by unsourceMapping
+			// which is called after symbolization is finished.
+			m.File = source
+			key = source
+		}
+		ms[key] = append(ms[key], src)
+	}
+	return ms
+}
+
+// unsourceMappings clears mapping file names that collectMappingSources
+// temporarily set to the remote source URL, once symbolization is done.
+// A file name is cleared only when the mapping has no build ID, no volume
+// prefix, and the name parses as an absolute URL.
+func unsourceMappings(p *profile.Profile) {
+	for _, m := range p.Mapping {
+		if m.BuildID != "" || filepath.VolumeName(m.File) != "" {
+			continue
+		}
+		if u, err := url.Parse(m.File); err == nil && u.IsAbs() {
+			m.File = ""
+		}
+	}
+}
+
+// locateBinaries searches for binary files listed in the profile and, if found,
+// updates the profile accordingly.
+func locateBinaries(p *profile.Profile, s *source, obj plugin.ObjTool, ui plugin.UI) {
+	// Construct search path to examine
+	searchPath := os.Getenv("PPROF_BINARY_PATH")
+	if searchPath == "" {
+		// Use $HOME/pprof/binaries as default directory for local symbolization binaries
+		searchPath = filepath.Join(os.Getenv(homeEnv()), "pprof", "binaries")
+	}
+mapping:
+	for _, m := range p.Mapping {
+		var noVolumeFile string
+		var baseName string
+		var dirName string
+		if m.File != "" {
+			noVolumeFile = strings.TrimPrefix(m.File, filepath.VolumeName(m.File))
+			baseName = filepath.Base(m.File)
+			dirName = filepath.Dir(noVolumeFile)
+		}
+
+		for _, path := range filepath.SplitList(searchPath) {
+			// Candidate locations, most specific first.
+			var fileNames []string
+			if m.BuildID != "" {
+				fileNames = []string{filepath.Join(path, m.BuildID, baseName)}
+				if matches, err := filepath.Glob(filepath.Join(path, m.BuildID, "*")); err == nil {
+					fileNames = append(fileNames, matches...)
+				}
+				fileNames = append(fileNames, filepath.Join(path, noVolumeFile, m.BuildID)) // perf path format
+				// Llvm buildid protocol: the first two characters of the build id
+				// are used as directory, and the remaining part is in the filename.
+				// e.g. `/ab/cdef0123456.debug`
+				fileNames = append(fileNames, filepath.Join(path, m.BuildID[:2], m.BuildID[2:]+".debug"))
+			}
+			if m.File != "" {
+				// Try both the basename and the full path, to support the same directory
+				// structure as the perf symfs option.
+				fileNames = append(fileNames, filepath.Join(path, baseName))
+				fileNames = append(fileNames, filepath.Join(path, noVolumeFile))
+				// Other locations: use the same search paths as GDB, according to
+				// https://sourceware.org/gdb/onlinedocs/gdb/Separate-Debug-Files.html
+				fileNames = append(fileNames, filepath.Join(path, noVolumeFile+".debug"))
+				fileNames = append(fileNames, filepath.Join(path, dirName, ".debug", baseName+".debug"))
+				fileNames = append(fileNames, filepath.Join(path, "usr", "lib", "debug", dirName, baseName+".debug"))
+			}
+			for _, name := range fileNames {
+				if f, err := obj.Open(name, m.Start, m.Limit, m.Offset, m.KernelRelocationSymbol); err == nil {
+					// NOTE(review): this defer runs at function exit, not per
+					// iteration, so every successfully opened candidate stays
+					// open until locateBinaries returns.
+					defer f.Close()
+					fileBuildID := f.BuildID()
+					if m.BuildID != "" && m.BuildID != fileBuildID {
+						ui.PrintErr("Ignoring local file " + name + ": build-id mismatch (" + m.BuildID + " != " + fileBuildID + ")")
+					} else {
+						// Explicitly do not update KernelRelocationSymbol --
+						// the new local file name is most likely missing it.
+						m.File = name
+						continue mapping
+					}
+				}
+			}
+		}
+	}
+	if len(p.Mapping) == 0 {
+		// If there are no mappings, add a fake mapping to attempt symbolization.
+		// This is useful for some profiles generated by the golang runtime, which
+		// do not include any mappings. Symbolization with a fake mapping will only
+		// be successful against a non-PIE binary.
+		m := &profile.Mapping{ID: 1}
+		p.Mapping = []*profile.Mapping{m}
+		for _, l := range p.Location {
+			l.Mapping = m
+		}
+	}
+	// If configured, apply executable filename override and (maybe, see below)
+	// build ID override from source. Assume the executable is the first mapping.
+	if execName, buildID := s.ExecName, s.BuildID; execName != "" || buildID != "" {
+		m := p.Mapping[0]
+		if execName != "" {
+			// Explicitly do not update KernelRelocationSymbol --
+			// the source override is most likely missing it.
+			m.File = execName
+		}
+		// Only apply the build ID override if the build ID in the main mapping is
+		// missing. Overwriting the build ID in case it's present is very likely a
+		// wrong thing to do so we refuse to do that.
+		if buildID != "" && m.BuildID == "" {
+			m.BuildID = buildID
+		}
+	}
+}
+
+// fetch fetches a profile from source, within the timeout specified,
+// producing messages through the ui. It returns the profile and the
+// url of the actual source of the profile for remote profiles.
+func fetch(source string, duration, timeout time.Duration, ui plugin.UI, tr http.RoundTripper) (p *profile.Profile, src string, err error) {
+ var f io.ReadCloser
+
+ if sourceURL, timeout := adjustURL(source, duration, timeout); sourceURL != "" {
+ ui.Print("Fetching profile over HTTP from " + sourceURL)
+ if duration > 0 {
+ ui.Print(fmt.Sprintf("Please wait... (%v)", duration))
+ }
+ f, err = fetchURL(sourceURL, timeout, tr)
+ src = sourceURL
+ } else if isPerfFile(source) {
+ f, err = convertPerfData(source, ui)
+ } else {
+ f, err = os.Open(source)
+ }
+ if err == nil {
+ defer f.Close()
+ p, err = profile.Parse(f)
+ }
+ return
+}
+
+// fetchURL fetches a profile from a URL using HTTP.
+func fetchURL(source string, timeout time.Duration, tr http.RoundTripper) (io.ReadCloser, error) {
+ client := &http.Client{
+ Transport: tr,
+ Timeout: timeout + 5*time.Second,
+ }
+ resp, err := client.Get(source)
+ if err != nil {
+ return nil, fmt.Errorf("http fetch: %v", err)
+ }
+ if resp.StatusCode != http.StatusOK {
+ defer resp.Body.Close()
+ return nil, statusCodeError(resp)
+ }
+
+ return resp.Body, nil
+}
+
+func statusCodeError(resp *http.Response) error {
+ if resp.Header.Get("X-Go-Pprof") != "" && strings.Contains(resp.Header.Get("Content-Type"), "text/plain") {
+ // error is from pprof endpoint
+ if body, err := io.ReadAll(resp.Body); err == nil {
+ return fmt.Errorf("server response: %s - %s", resp.Status, body)
+ }
+ }
+ return fmt.Errorf("server response: %s", resp.Status)
+}
+
+// isPerfFile checks if a file is in perf.data format. It also returns false
+// if it encounters an error during the check.
+func isPerfFile(path string) bool {
+ sourceFile, openErr := os.Open(path)
+ if openErr != nil {
+ return false
+ }
+ defer sourceFile.Close()
+
+ // If the file is the output of a perf record command, it should begin
+ // with the string PERFILE2.
+ perfHeader := []byte("PERFILE2")
+ actualHeader := make([]byte, len(perfHeader))
+ if _, readErr := sourceFile.Read(actualHeader); readErr != nil {
+ return false
+ }
+ return bytes.Equal(actualHeader, perfHeader)
+}
+
+// convertPerfData converts the file at path which should be in perf.data format
+// using the perf_to_profile tool and returns the file containing the
+// profile.proto formatted data.
+func convertPerfData(perfPath string, ui plugin.UI) (*os.File, error) {
+ ui.Print(fmt.Sprintf(
+ "Converting %s to a profile.proto... (May take a few minutes)",
+ perfPath))
+ profile, err := newTempFile(os.TempDir(), "pprof_", ".pb.gz")
+ if err != nil {
+ return nil, err
+ }
+ deferDeleteTempFile(profile.Name())
+ cmd := exec.Command("perf_to_profile", "-i", perfPath, "-o", profile.Name(), "-f")
+ cmd.Stdout, cmd.Stderr = os.Stdout, os.Stderr
+ if err := cmd.Run(); err != nil {
+ profile.Close()
+ return nil, fmt.Errorf("failed to convert perf.data file. Try github.com/google/perf_data_converter: %v", err)
+ }
+ return profile, nil
+}
+
+// adjustURL validates if a profile source is a URL and returns an
+// cleaned up URL and the timeout to use for retrieval over HTTP.
+// If the source cannot be recognized as a URL it returns an empty string.
+func adjustURL(source string, duration, timeout time.Duration) (string, time.Duration) {
+ u, err := url.Parse(source)
+ if err != nil || (u.Host == "" && u.Scheme != "" && u.Scheme != "file") {
+ // Try adding http:// to catch sources of the form hostname:port/path.
+ // url.Parse treats "hostname" as the scheme.
+ u, err = url.Parse("http://" + source)
+ }
+ if err != nil || u.Host == "" {
+ return "", 0
+ }
+
+ // Apply duration/timeout overrides to URL.
+ values := u.Query()
+ if duration > 0 {
+ values.Set("seconds", fmt.Sprint(int(duration.Seconds())))
+ } else {
+ if urlSeconds := values.Get("seconds"); urlSeconds != "" {
+ if us, err := strconv.ParseInt(urlSeconds, 10, 32); err == nil {
+ duration = time.Duration(us) * time.Second
+ }
+ }
+ }
+ if timeout <= 0 {
+ if duration > 0 {
+ timeout = duration + duration/2
+ } else {
+ timeout = 60 * time.Second
+ }
+ }
+ u.RawQuery = values.Encode()
+ return u.String(), timeout
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/driver/flags.go b/src/cmd/vendor/github.com/google/pprof/internal/driver/flags.go
new file mode 100644
index 0000000..5390319
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/driver/flags.go
@@ -0,0 +1,71 @@
+// Copyright 2018 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package driver
+
+import (
+ "flag"
+ "strings"
+)
+
// GoFlags implements the plugin.FlagSet interface on top of the standard
// library flag package. UsageMsgs accumulates extra usage text registered
// through AddExtraUsage.
type GoFlags struct {
	UsageMsgs []string
}

// Bool implements the plugin.FlagSet interface.
func (*GoFlags) Bool(name string, def bool, usage string) *bool {
	return flag.Bool(name, def, usage)
}

// Int implements the plugin.FlagSet interface.
func (*GoFlags) Int(name string, def int, usage string) *int {
	return flag.Int(name, def, usage)
}

// Float64 implements the plugin.FlagSet interface.
func (*GoFlags) Float64(name string, def float64, usage string) *float64 {
	return flag.Float64(name, def, usage)
}

// String implements the plugin.FlagSet interface.
func (*GoFlags) String(name, def, usage string) *string {
	return flag.String(name, def, usage)
}

// StringList implements the plugin.FlagSet interface. The returned slice
// contains a single string flag value.
func (*GoFlags) StringList(name, def, usage string) *[]*string {
	list := []*string{flag.String(name, def, usage)}
	return &list
}

// ExtraUsage implements the plugin.FlagSet interface by joining all
// registered usage messages with newlines.
func (f *GoFlags) ExtraUsage() string {
	return strings.Join(f.UsageMsgs, "\n")
}

// AddExtraUsage implements the plugin.FlagSet interface by recording one
// additional usage message.
func (f *GoFlags) AddExtraUsage(eu string) {
	f.UsageMsgs = append(f.UsageMsgs, eu)
}

// Parse implements the plugin.FlagSet interface. It installs usage as the
// flag package's usage function, parses the command line, and returns the
// remaining non-flag arguments, invoking usage when none are left.
func (*GoFlags) Parse(usage func()) []string {
	flag.Usage = usage
	flag.Parse()
	args := flag.Args()
	if len(args) == 0 {
		usage()
	}
	return args
}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/driver/flamegraph.go b/src/cmd/vendor/github.com/google/pprof/internal/driver/flamegraph.go
new file mode 100644
index 0000000..fbeb765
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/driver/flamegraph.go
@@ -0,0 +1,106 @@
+// Copyright 2017 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package driver
+
+import (
+ "encoding/json"
+ "html/template"
+ "net/http"
+ "strings"
+
+ "github.com/google/pprof/internal/graph"
+ "github.com/google/pprof/internal/measurement"
+ "github.com/google/pprof/internal/report"
+)
+
// treeNode is the JSON-serializable representation of one frame in the
// flame graph tree built by flamegraph. The single-letter JSON tags keep
// the payload embedded in the generated page small.
type treeNode struct {
	Name      string      `json:"n"` // shortened function name
	FullName  string      `json:"f"` // full printable function name
	Cum       int64       `json:"v"` // cumulative value of the node
	CumFormat string      `json:"l"` // Cum formatted per the report config (label)
	Percent   string      `json:"p"` // Cum as a percentage of the total
	Children  []*treeNode `json:"c"` // callees (populated from outgoing edges)
}
+
// flamegraph generates a web page containing a flamegraph. It renders the
// report as a tree of treeNode values, serializes that tree to JSON, and
// hands it to the "flamegraph" template.
func (ui *webInterface) flamegraph(w http.ResponseWriter, req *http.Request) {
	// Force the call tree so that the graph is a tree.
	// Also do not trim the tree so that the flame graph contains all functions.
	rpt, errList := ui.makeReport(w, req, []string{"svg"}, func(cfg *config) {
		cfg.CallTree = true
		cfg.Trim = false
	})
	if rpt == nil {
		return // error already reported
	}

	// Generate dot graph.
	g, config := report.GetDOT(rpt)
	var nodes []*treeNode
	nroots := 0
	rootValue := int64(0)
	nodeArr := []string{}
	nodeMap := map[*graph.Node]*treeNode{}
	// Make all nodes and the map, collect the roots.
	for _, n := range g.Nodes {
		v := n.CumValue()
		fullName := n.Info.PrintableName()
		node := &treeNode{
			Name:      graph.ShortenFunctionName(fullName),
			FullName:  fullName,
			Cum:       v,
			CumFormat: config.FormatValue(v),
			Percent:   strings.TrimSpace(measurement.Percentage(v, config.Total)),
		}
		nodes = append(nodes, node)
		if len(n.In) == 0 {
			// A node with no incoming edges is a root. Swap it into the
			// leading nodes[0:nroots] section so all roots end up grouped
			// at the front, and add its value to the synthetic root total.
			nodes[nroots], nodes[len(nodes)-1] = nodes[len(nodes)-1], nodes[nroots]
			nroots++
			rootValue += v
		}
		nodeMap[n] = node
		// Get all node names into an array.
		nodeArr = append(nodeArr, n.Info.Name)
	}
	// Populate the child links.
	for _, n := range g.Nodes {
		node := nodeMap[n]
		for child := range n.Out {
			node.Children = append(node.Children, nodeMap[child])
		}
	}

	// Wrap the collected roots under a single synthetic "root" node so the
	// flame graph always has exactly one top frame.
	rootNode := &treeNode{
		Name:      "root",
		FullName:  "root",
		Cum:       rootValue,
		CumFormat: config.FormatValue(rootValue),
		Percent:   strings.TrimSpace(measurement.Percentage(rootValue, config.Total)),
		Children:  nodes[0:nroots],
	}

	// JSON marshalling flame graph
	b, err := json.Marshal(rootNode)
	if err != nil {
		http.Error(w, "error serializing flame graph", http.StatusInternalServerError)
		ui.options.UI.PrintErr(err)
		return
	}

	ui.render(w, req, "flamegraph", rpt, errList, config.Labels, webArgs{
		FlameGraph: template.JS(b),
		Nodes:      nodeArr,
	})
}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/driver/html/common.css b/src/cmd/vendor/github.com/google/pprof/internal/driver/html/common.css
new file mode 100644
index 0000000..e0de53c
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/driver/html/common.css
@@ -0,0 +1,273 @@
+* {
+ margin: 0;
+ padding: 0;
+ box-sizing: border-box;
+}
+html, body {
+ height: 100%;
+}
+body {
+ font-family: 'Roboto', -apple-system, BlinkMacSystemFont, 'Segoe UI', Helvetica, Arial, sans-serif, 'Apple Color Emoji', 'Segoe UI Emoji', 'Segoe UI Symbol';
+ font-size: 13px;
+ line-height: 1.4;
+ display: flex;
+ flex-direction: column;
+}
+a {
+ color: #2a66d9;
+}
+.header {
+ display: flex;
+ align-items: center;
+ height: 44px;
+ min-height: 44px;
+ background-color: #eee;
+ color: #212121;
+ padding: 0 1rem;
+}
+.header > div {
+ margin: 0 0.125em;
+}
+.header .title h1 {
+ font-size: 1.75em;
+ margin-right: 1rem;
+ margin-bottom: 4px;
+}
+.header .title a {
+ color: #212121;
+ text-decoration: none;
+}
+.header .title a:hover {
+ text-decoration: underline;
+}
+.header .description {
+ width: 100%;
+ text-align: right;
+ white-space: nowrap;
+}
+@media screen and (max-width: 799px) {
+ .header input {
+ display: none;
+ }
+}
+#detailsbox {
+ display: none;
+ z-index: 1;
+ position: fixed;
+ top: 40px;
+ right: 20px;
+ background-color: #ffffff;
+ box-shadow: 0 1px 5px rgba(0,0,0,.3);
+ line-height: 24px;
+ padding: 1em;
+ text-align: left;
+}
+.header input {
+ background: white url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24' style='pointer-events:none;display:block;width:100%25;height:100%25;fill:%23757575'%3E%3Cpath d='M15.5 14h-.79l-.28-.27C15.41 12.59 16 11.11 16 9.5 16 5.91 13.09 3 9.5 3S3 5.91 3 9.5 5.91 16 9.5 16c1.61.0 3.09-.59 4.23-1.57l.27.28v.79l5 4.99L20.49 19l-4.99-5zm-6 0C7.01 14 5 11.99 5 9.5S7.01 5 9.5 5 14 7.01 14 9.5 11.99 14 9.5 14z'/%3E%3C/svg%3E") no-repeat 4px center/20px 20px;
+ border: 1px solid #d1d2d3;
+ border-radius: 2px 0 0 2px;
+ padding: 0.25em;
+ padding-left: 28px;
+ margin-left: 1em;
+ font-family: 'Roboto', 'Noto', sans-serif;
+ font-size: 1em;
+ line-height: 24px;
+ color: #212121;
+}
+.downArrow {
+ border-top: .36em solid #ccc;
+ border-left: .36em solid transparent;
+ border-right: .36em solid transparent;
+ margin-bottom: .05em;
+ margin-left: .5em;
+ transition: border-top-color 200ms;
+}
+.menu-item {
+ height: 100%;
+ text-transform: uppercase;
+ font-family: 'Roboto Medium', -apple-system, BlinkMacSystemFont, 'Segoe UI', Helvetica, Arial, sans-serif, 'Apple Color Emoji', 'Segoe UI Emoji', 'Segoe UI Symbol';
+ position: relative;
+}
+.menu-item .menu-name:hover {
+ opacity: 0.75;
+}
+.menu-item .menu-name:hover .downArrow {
+ border-top-color: #666;
+}
+.menu-name {
+ height: 100%;
+ padding: 0 0.5em;
+ display: flex;
+ align-items: center;
+ justify-content: center;
+}
+.menu-name a {
+ text-decoration: none;
+ color: #212121;
+}
+.submenu {
+ display: none;
+ z-index: 1;
+ margin-top: -4px;
+ min-width: 10em;
+ position: absolute;
+ left: 0px;
+ background-color: white;
+ box-shadow: 0 1px 5px rgba(0,0,0,.3);
+ font-size: 100%;
+ text-transform: none;
+ white-space: nowrap;
+}
+.menu-item, .submenu {
+ user-select: none;
+ -moz-user-select: none;
+ -ms-user-select: none;
+ -webkit-user-select: none;
+}
+.submenu hr {
+ border: 0;
+ border-top: 2px solid #eee;
+}
+.submenu a {
+ display: block;
+ padding: .5em 1em;
+ text-decoration: none;
+}
+.submenu a:hover, .submenu a.active {
+ color: white;
+ background-color: #6b82d6;
+}
+.submenu a.disabled {
+ color: gray;
+ pointer-events: none;
+}
+.menu-check-mark {
+ position: absolute;
+ left: 2px;
+}
+.menu-delete-btn {
+ position: absolute;
+ right: 2px;
+}
+
+{{/* Used to disable events when a modal dialog is displayed */}}
+#dialog-overlay {
+ display: none;
+ position: fixed;
+ left: 0px;
+ top: 0px;
+ width: 100%;
+ height: 100%;
+ background-color: rgba(1,1,1,0.1);
+}
+
+.dialog {
+ {{/* Displayed centered horizontally near the top */}}
+ display: none;
+ position: fixed;
+ margin: 0px;
+ top: 60px;
+ left: 50%;
+ transform: translateX(-50%);
+
+ z-index: 3;
+ font-size: 125%;
+ background-color: #ffffff;
+ box-shadow: 0 1px 5px rgba(0,0,0,.3);
+}
+.dialog-header {
+ font-size: 120%;
+ border-bottom: 1px solid #CCCCCC;
+ width: 100%;
+ text-align: center;
+ background: #EEEEEE;
+ user-select: none;
+}
+.dialog-footer {
+ border-top: 1px solid #CCCCCC;
+ width: 100%;
+ text-align: right;
+ padding: 10px;
+}
+.dialog-error {
+ margin: 10px;
+ color: red;
+}
+.dialog input {
+ margin: 10px;
+ font-size: inherit;
+}
+.dialog button {
+ margin-left: 10px;
+ font-size: inherit;
+}
+#save-dialog, #delete-dialog {
+ width: 50%;
+ max-width: 20em;
+}
+#delete-prompt {
+ padding: 10px;
+}
+
+#content {
+ overflow-y: scroll;
+ padding: 1em;
+}
+#top {
+ overflow-y: scroll;
+}
+#graph {
+ overflow: hidden;
+}
+#graph svg {
+ width: 100%;
+ height: auto;
+ padding: 10px;
+}
+#content.source .filename {
+ margin-top: 0;
+ margin-bottom: 1em;
+ font-size: 120%;
+}
+#content.source pre {
+ margin-bottom: 3em;
+}
+table {
+ border-spacing: 0px;
+ width: 100%;
+ padding-bottom: 1em;
+ white-space: nowrap;
+}
+table thead {
+ font-family: 'Roboto Medium', -apple-system, BlinkMacSystemFont, 'Segoe UI', Helvetica, Arial, sans-serif, 'Apple Color Emoji', 'Segoe UI Emoji', 'Segoe UI Symbol';
+}
+table tr th {
+ position: sticky;
+ top: 0;
+ background-color: #ddd;
+ text-align: right;
+ padding: .3em .5em;
+}
+table tr td {
+ padding: .3em .5em;
+ text-align: right;
+}
+#top table tr th:nth-child(6),
+#top table tr th:nth-child(7),
+#top table tr td:nth-child(6),
+#top table tr td:nth-child(7) {
+ text-align: left;
+}
+#top table tr td:nth-child(6) {
+ width: 100%;
+ text-overflow: ellipsis;
+ overflow: hidden;
+ white-space: nowrap;
+}
+#flathdr1, #flathdr2, #cumhdr1, #cumhdr2, #namehdr {
+ cursor: ns-resize;
+}
+.hilite {
+ background-color: #ebf5fb;
+ font-weight: bold;
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/driver/html/common.js b/src/cmd/vendor/github.com/google/pprof/internal/driver/html/common.js
new file mode 100644
index 0000000..ff980f6
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/driver/html/common.js
@@ -0,0 +1,714 @@
// Make svg pannable and zoomable.
// Call clickHandler(t) if a click event is caught by the pan event handlers.
// Supports mouse panning, wheel zooming, single-touch panning, and
// two-finger pinch zooming; a press-and-release with no significant
// movement is treated as a click on the pressed element.
function initPanAndZoom(svg, clickHandler) {
  'use strict';

  // Current mouse/touch handling mode
  const IDLE = 0;
  const MOUSEPAN = 1;
  const TOUCHPAN = 2;
  const TOUCHZOOM = 3;
  let mode = IDLE;

  // State needed to implement zooming.
  let currentScale = 1.0;
  const initWidth = svg.viewBox.baseVal.width;
  const initHeight = svg.viewBox.baseVal.height;

  // State needed to implement panning.
  let panLastX = 0; // Last event X coordinate
  let panLastY = 0; // Last event Y coordinate
  let moved = false; // Have we seen significant movement
  let touchid = null; // Current touch identifier

  // State needed for pinch zooming
  let touchid2 = null; // Second id for pinch zooming
  let initGap = 1.0; // Starting gap between two touches
  let initScale = 1.0; // currentScale when pinch zoom started
  let centerPoint = null; // Center point for scaling

  // Convert event coordinates to svg coordinates.
  function toSvg(x, y) {
    const p = svg.createSVGPoint();
    p.x = x;
    p.y = y;
    let m = svg.getCTM();
    if (m == null) m = svg.getScreenCTM(); // Firefox workaround.
    return p.matrixTransform(m.inverse());
  }

  // Change the scaling for the svg to s, keeping the point denoted
  // by u (in svg coordinates) fixed at the same screen location.
  function rescale(s, u) {
    // Limit to a good range.
    if (s < 0.2) s = 0.2;
    if (s > 10.0) s = 10.0;

    currentScale = s;

    // svg.viewBox defines the visible portion of the user coordinate
    // system. So to magnify by s, divide the visible portion by s,
    // which will then be stretched to fit the viewport.
    const vb = svg.viewBox;
    const w1 = vb.baseVal.width;
    const w2 = initWidth / s;
    const h1 = vb.baseVal.height;
    const h2 = initHeight / s;
    vb.baseVal.width = w2;
    vb.baseVal.height = h2;

    // We also want to adjust vb.baseVal.x so that u.x remains at same
    // screen X coordinate. In other words, want to change it from x1 to x2
    // so that:
    //     (u.x - x1) / w1 = (u.x - x2) / w2
    // Simplifying that, we get
    //     (u.x - x1) * (w2 / w1) = u.x - x2
    //     x2 = u.x - (u.x - x1) * (w2 / w1)
    vb.baseVal.x = u.x - (u.x - vb.baseVal.x) * (w2 / w1);
    vb.baseVal.y = u.y - (u.y - vb.baseVal.y) * (h2 / h1);
  }

  // Zoom in or out on wheel events, keeping the cursor position fixed.
  function handleWheel(e) {
    if (e.deltaY == 0) return;
    // Change scale factor by 1.1 or 1/1.1
    rescale(currentScale * (e.deltaY < 0 ? 1.1 : (1/1.1)),
            toSvg(e.offsetX, e.offsetY));
  }

  // Switch input mode and clear any touch bookkeeping.
  function setMode(m) {
    mode = m;
    touchid = null;
    touchid2 = null;
  }

  // Record the starting position of a pan gesture.
  function panStart(x, y) {
    moved = false;
    panLastX = x;
    panLastY = y;
  }

  // Translate the viewBox by the movement since the last pan event,
  // converting the screen-space delta into svg user-space units.
  function panMove(x, y) {
    let dx = x - panLastX;
    let dy = y - panLastY;
    if (Math.abs(dx) <= 2 && Math.abs(dy) <= 2) return; // Ignore tiny moves

    moved = true;
    panLastX = x;
    panLastY = y;

    // Firefox workaround: get dimensions from parentNode.
    const swidth = svg.clientWidth || svg.parentNode.clientWidth;
    const sheight = svg.clientHeight || svg.parentNode.clientHeight;

    // Convert deltas from screen space to svg space.
    dx *= (svg.viewBox.baseVal.width / swidth);
    dy *= (svg.viewBox.baseVal.height / sheight);

    svg.viewBox.baseVal.x -= dx;
    svg.viewBox.baseVal.y -= dy;
  }

  // Begin a mouse pan on a left-button press.
  function handleScanStart(e) {
    if (e.button != 0) return; // Do not catch right-clicks etc.
    setMode(MOUSEPAN);
    panStart(e.clientX, e.clientY);
    e.preventDefault();
    svg.addEventListener('mousemove', handleScanMove);
  }

  function handleScanMove(e) {
    if (e.buttons == 0) {
      // Missed an end event, perhaps because mouse moved outside window.
      setMode(IDLE);
      svg.removeEventListener('mousemove', handleScanMove);
      return;
    }
    if (mode == MOUSEPAN) panMove(e.clientX, e.clientY);
  }

  // Finish a mouse pan; report a click when the pointer never moved.
  function handleScanEnd(e) {
    if (mode == MOUSEPAN) panMove(e.clientX, e.clientY);
    setMode(IDLE);
    svg.removeEventListener('mousemove', handleScanMove);
    if (!moved) clickHandler(e.target);
  }

  // Find touch object with specified identifier.
  function findTouch(tlist, id) {
    for (const t of tlist) {
      if (t.identifier == id) return t;
    }
    return null;
  }

  // Return distance between two touch points
  function touchGap(t1, t2) {
    const dx = t1.clientX - t2.clientX;
    const dy = t1.clientY - t2.clientY;
    return Math.hypot(dx, dy);
  }

  function handleTouchStart(e) {
    if (mode == IDLE && e.changedTouches.length == 1) {
      // Start touch based panning
      const t = e.changedTouches[0];
      setMode(TOUCHPAN);
      touchid = t.identifier;
      panStart(t.clientX, t.clientY);
      e.preventDefault();
    } else if (mode == TOUCHPAN && e.touches.length == 2) {
      // Start pinch zooming
      setMode(TOUCHZOOM);
      const t1 = e.touches[0];
      const t2 = e.touches[1];
      touchid = t1.identifier;
      touchid2 = t2.identifier;
      initScale = currentScale;
      initGap = touchGap(t1, t2);
      centerPoint = toSvg((t1.clientX + t2.clientX) / 2,
                          (t1.clientY + t2.clientY) / 2);
      e.preventDefault();
    }
  }

  function handleTouchMove(e) {
    if (mode == TOUCHPAN) {
      const t = findTouch(e.changedTouches, touchid);
      if (t == null) return;
      if (e.touches.length != 1) {
        setMode(IDLE);
        return;
      }
      panMove(t.clientX, t.clientY);
      e.preventDefault();
    } else if (mode == TOUCHZOOM) {
      // Get two touches; new gap; rescale to ratio.
      const t1 = findTouch(e.touches, touchid);
      const t2 = findTouch(e.touches, touchid2);
      if (t1 == null || t2 == null) return;
      const gap = touchGap(t1, t2);
      rescale(initScale * gap / initGap, centerPoint);
      e.preventDefault();
    }
  }

  // End a touch gesture; a pan that never moved counts as a click.
  function handleTouchEnd(e) {
    if (mode == TOUCHPAN) {
      const t = findTouch(e.changedTouches, touchid);
      if (t == null) return;
      panMove(t.clientX, t.clientY);
      setMode(IDLE);
      e.preventDefault();
      if (!moved) clickHandler(t.target);
    } else if (mode == TOUCHZOOM) {
      setMode(IDLE);
      e.preventDefault();
    }
  }

  svg.addEventListener('mousedown', handleScanStart);
  svg.addEventListener('mouseup', handleScanEnd);
  svg.addEventListener('touchstart', handleTouchStart);
  svg.addEventListener('touchmove', handleTouchMove);
  svg.addEventListener('touchend', handleTouchEnd);
  svg.addEventListener('wheel', handleWheel, true);
}
+
// initMenus installs dropdown behavior for the menu headers: pressing a
// header shows its submenu; a later press outside the header, or a release
// inside the submenu, retracts it again.
function initMenus() {
  'use strict';

  // The submenu currently on display (and its header), if any.
  let activeMenu = null;
  let activeMenuHdr = null;

  function cancelActiveMenu() {
    if (activeMenu == null) return;
    activeMenu.style.display = 'none';
    activeMenu = null;
    activeMenuHdr = null;
  }

  // Set click handlers on every menu header.
  for (const menu of document.getElementsByClassName('submenu')) {
    const hdr = menu.parentElement;
    if (hdr == null) return;
    if (hdr.classList.contains('disabled')) return;
    function showMenu(e) {
      // menu is a child of hdr, so this event can fire for clicks
      // inside menu. Ignore such clicks.
      if (e.target.parentElement != hdr) return;
      activeMenu = menu;
      activeMenuHdr = hdr;
      menu.style.display = 'block';
    }
    hdr.addEventListener('mousedown', showMenu);
    hdr.addEventListener('touchstart', showMenu);
  }

  // If there is an active menu and a down event outside, retract the menu.
  for (const t of ['mousedown', 'touchstart']) {
    document.addEventListener(t, (e) => {
      // Note: to avoid unnecessary flicker, if the down event is inside
      // the active menu header, do not retract the menu.
      if (activeMenuHdr != e.target.closest('.menu-item')) {
        cancelActiveMenu();
      }
    }, { passive: true, capture: true });
  }

  // If there is an active menu and an up event inside, retract the menu.
  document.addEventListener('mouseup', (e) => {
    if (activeMenu == e.target.closest('.submenu')) {
      cancelActiveMenu();
    }
  }, { passive: true, capture: true });
}
+
// sendURL issues an HTTP request with the given method to url, then calls
// done(true) on an ok (2xx) response and done(false) on any failure.
function sendURL(method, url, done) {
  const request = fetch(url.toString(), {method: method});
  request
      .then((response) => done(response.ok))
      .catch(() => done(false));
}
+
// Initialize handlers for saving/loading configurations.
// Wires up the save and delete dialogs (shown over #dialog-overlay) and
// the per-config delete buttons in the menu; server interaction goes
// through the ./saveconfig and ./deleteconfig endpoints via sendURL.
function initConfigManager() {
  'use strict';

  // Initialize various elements.
  function elem(id) {
    const result = document.getElementById(id);
    if (!result) console.warn('element ' + id + ' not found');
    return result;
  }
  const overlay = elem('dialog-overlay');
  const saveDialog = elem('save-dialog');
  const saveInput = elem('save-name');
  const saveError = elem('save-error');
  const delDialog = elem('delete-dialog');
  const delPrompt = elem('delete-prompt');
  const delError = elem('delete-error');

  // Dialog currently displayed (if any) and the menu element whose config
  // deletion is awaiting confirmation.
  let currentDialog = null;
  let currentDeleteTarget = null;

  // Display dialog (hiding any previous one); pass null to hide all.
  function showDialog(dialog) {
    if (currentDialog != null) {
      overlay.style.display = 'none';
      currentDialog.style.display = 'none';
    }
    currentDialog = dialog;
    if (dialog != null) {
      overlay.style.display = 'block';
      dialog.style.display = 'block';
    }
  }

  function cancelDialog(e) {
    showDialog(null);
  }

  // Show dialog for saving the current config.
  function showSaveDialog(e) {
    saveError.innerText = '';
    showDialog(saveDialog);
    saveInput.focus();
  }

  // Commit save config.
  function commitSave(e) {
    const name = saveInput.value;
    const url = new URL(document.URL);
    // Set path relative to existing path.
    url.pathname = new URL('./saveconfig', document.URL).pathname;
    url.searchParams.set('config', name);
    saveError.innerText = '';
    sendURL('POST', url, (ok) => {
      if (!ok) {
        saveError.innerText = 'Save failed';
      } else {
        showDialog(null);
        location.reload(); // Reload to show updated config menu
      }
    });
  }

  function handleSaveInputKey(e) {
    if (e.key === 'Enter') commitSave(e);
  }

  // Ask for confirmation before deleting the config named on elem.
  function deleteConfig(e, elem) {
    e.preventDefault();
    const config = elem.dataset.config;
    delPrompt.innerText = 'Delete ' + config + '?';
    currentDeleteTarget = elem;
    showDialog(delDialog);
  }

  // Perform the confirmed deletion on the server, then drop the
  // corresponding menu entry on success.
  function commitDelete(e, elem) {
    if (!currentDeleteTarget) return;
    const config = currentDeleteTarget.dataset.config;
    const url = new URL('./deleteconfig', document.URL);
    url.searchParams.set('config', config);
    delError.innerText = '';
    sendURL('DELETE', url, (ok) => {
      if (!ok) {
        delError.innerText = 'Delete failed';
        return;
      }
      showDialog(null);
      // Remove menu entry for this config.
      if (currentDeleteTarget && currentDeleteTarget.parentElement) {
        currentDeleteTarget.parentElement.remove();
      }
    });
  }

  // Bind event on elem to fn.
  function bind(event, elem, fn) {
    if (elem == null) return;
    elem.addEventListener(event, fn);
    if (event == 'click') {
      // Also enable via touch.
      elem.addEventListener('touchstart', fn);
    }
  }

  bind('click', elem('save-config'), showSaveDialog);
  bind('click', elem('save-cancel'), cancelDialog);
  bind('click', elem('save-confirm'), commitSave);
  bind('keydown', saveInput, handleSaveInputKey);

  bind('click', elem('delete-cancel'), cancelDialog);
  bind('click', elem('delete-confirm'), commitDelete);

  // Activate deletion button for all config entries in menu.
  for (const del of Array.from(document.getElementsByClassName('menu-delete-btn'))) {
    bind('click', del, (e) => {
      deleteConfig(e, del);
    });
  }
}
+
+// options if present can contain:
+// hiliter: function(Number, Boolean): Boolean
+// Overridable mechanism for highlighting/unhighlighting specified node.
+// current: function() Map[Number,Boolean]
+// Overridable mechanism for fetching set of currently selected nodes.
+function viewer(baseUrl, nodes, options) {
+ 'use strict';
+
+ // Elements
+ const search = document.getElementById('search');
+ const graph0 = document.getElementById('graph0');
+ const svg = (graph0 == null ? null : graph0.parentElement);
+ const toptable = document.getElementById('toptable');
+
+ let regexpActive = false;
+ let selected = new Map();
+ let origFill = new Map();
+ let searchAlarm = null;
+ let buttonsEnabled = true;
+
+ // Return current selection.
+ function getSelection() {
+ if (selected.size > 0) {
+ return selected;
+ } else if (options && options.current) {
+ return options.current();
+ }
+ return new Map();
+ }
+
+ function handleDetails(e) {
+ e.preventDefault();
+ const detailsText = document.getElementById('detailsbox');
+ if (detailsText != null) {
+ if (detailsText.style.display === 'block') {
+ detailsText.style.display = 'none';
+ } else {
+ detailsText.style.display = 'block';
+ }
+ }
+ }
+
+ function handleKey(e) {
+ if (e.keyCode != 13) return;
+ setHrefParams(window.location, function (params) {
+ params.set('f', search.value);
+ });
+ e.preventDefault();
+ }
+
+ function handleSearch() {
+ // Delay expensive processing so a flurry of key strokes is handled once.
+ if (searchAlarm != null) {
+ clearTimeout(searchAlarm);
+ }
+ searchAlarm = setTimeout(selectMatching, 300);
+
+ regexpActive = true;
+ updateButtons();
+ }
+
+ function selectMatching() {
+ searchAlarm = null;
+ let re = null;
+ if (search.value != '') {
+ try {
+ re = new RegExp(search.value);
+ } catch (e) {
+ // TODO: Display error state in search box
+ return;
+ }
+ }
+
+ function match(text) {
+ return re != null && re.test(text);
+ }
+
+ // drop currently selected items that do not match re.
+ selected.forEach(function(v, n) {
+ if (!match(nodes[n])) {
+ unselect(n);
+ }
+ })
+
+ // add matching items that are not currently selected.
+ if (nodes) {
+ for (let n = 0; n < nodes.length; n++) {
+ if (!selected.has(n) && match(nodes[n])) {
+ select(n);
+ }
+ }
+ }
+
+ updateButtons();
+ }
+
+ function toggleSvgSelect(elem) {
+ // Walk up to immediate child of graph0
+ while (elem != null && elem.parentElement != graph0) {
+ elem = elem.parentElement;
+ }
+ if (!elem) return;
+
+ // Disable regexp mode.
+ regexpActive = false;
+
+ const n = nodeId(elem);
+ if (n < 0) return;
+ if (selected.has(n)) {
+ unselect(n);
+ } else {
+ select(n);
+ }
+ updateButtons();
+ }
+
+ function unselect(n) {
+ if (setNodeHighlight(n, false)) selected.delete(n);
+ }
+
+ function select(n, elem) {
+ if (setNodeHighlight(n, true)) selected.set(n, true);
+ }
+
+ function nodeId(elem) {
+ const id = elem.id;
+ if (!id) return -1;
+ if (!id.startsWith('node')) return -1;
+ const n = parseInt(id.slice(4), 10);
+ if (isNaN(n)) return -1;
+ if (n < 0 || n >= nodes.length) return -1;
+ return n;
+ }
+
+ // Change highlighting of node (returns true if node was found).
+ function setNodeHighlight(n, set) {
+ if (options && options.hiliter) return options.hiliter(n, set);
+
+ const elem = document.getElementById('node' + n);
+ if (!elem) return false;
+
+ // Handle table row highlighting.
+ if (elem.nodeName == 'TR') {
+ elem.classList.toggle('hilite', set);
+ return true;
+ }
+
+ // Handle svg element highlighting.
+ const p = findPolygon(elem);
+ if (p != null) {
+ if (set) {
+ origFill.set(p, p.style.fill);
+ p.style.fill = '#ccccff';
+ } else if (origFill.has(p)) {
+ p.style.fill = origFill.get(p);
+ }
+ }
+
+ return true;
+ }
+
+ function findPolygon(elem) {
+ if (elem.localName == 'polygon') return elem;
+ for (const c of elem.children) {
+ const p = findPolygon(c);
+ if (p != null) return p;
+ }
+ return null;
+ }
+
+ // convert a string to a regexp that matches that string.
+ function quotemeta(str) {
+ return str.replace(/([\\\.?+*\[\](){}|^$])/g, '\\$1');
+ }
+
+ function setSampleIndexLink(si) {
+ const elem = document.getElementById('sampletype-' + si);
+ if (elem != null) {
+ setHrefParams(elem, function (params) {
+ params.set("si", si);
+ });
+ }
+ }
+
+ // Update id's href to reflect current selection whenever it is
+ // liable to be followed.
+ function makeSearchLinkDynamic(id) {
+ const elem = document.getElementById(id);
+ if (elem == null) return;
+
+ // Most links copy current selection into the 'f' parameter,
+ // but Refine menu links are different.
+ let param = 'f';
+ if (id == 'ignore') param = 'i';
+ if (id == 'hide') param = 'h';
+ if (id == 'show') param = 's';
+ if (id == 'show-from') param = 'sf';
+
+ // We update on mouseenter so middle-click/right-click work properly.
+ elem.addEventListener('mouseenter', updater);
+ elem.addEventListener('touchstart', updater);
+
+ function updater() {
+ // The selection can be in one of two modes: regexp-based or
+ // list-based. Construct regular expression depending on mode.
+ let re = regexpActive
+ ? search.value
+ : Array.from(getSelection().keys()).map(key => quotemeta(nodes[key])).join('|');
+
+ setHrefParams(elem, function (params) {
+ if (re != '') {
+ // For focus/show/show-from, forget old parameter. For others, add to re.
+ if (param != 'f' && param != 's' && param != 'sf' && params.has(param)) {
+ const old = params.get(param);
+ if (old != '') {
+ re += '|' + old;
+ }
+ }
+ params.set(param, re);
+ } else {
+ params.delete(param);
+ }
+ });
+ }
+ }
+
+ function setHrefParams(elem, paramSetter) {
+ let url = new URL(elem.href);
+ url.hash = '';
+
+ // Copy params from this page's URL.
+ const params = url.searchParams;
+ for (const p of new URLSearchParams(window.location.search)) {
+ params.set(p[0], p[1]);
+ }
+
+ // Give the params to the setter to modify.
+ paramSetter(params);
+
+ elem.href = url.toString();
+ }
+
+ function handleTopClick(e) {
+ // Walk back until we find TR and then get the Name column (index 5)
+ let elem = e.target;
+ while (elem != null && elem.nodeName != 'TR') {
+ elem = elem.parentElement;
+ }
+ if (elem == null || elem.children.length < 6) return;
+
+ e.preventDefault();
+ const tr = elem;
+ const td = elem.children[5];
+ if (td.nodeName != 'TD') return;
+ const name = td.innerText;
+ const index = nodes.indexOf(name);
+ if (index < 0) return;
+
+ // Disable regexp mode.
+ regexpActive = false;
+
+ if (selected.has(index)) {
+ unselect(index, elem);
+ } else {
+ select(index, elem);
+ }
+ updateButtons();
+ }
+
+ function updateButtons() {
+ const enable = (search.value != '' || getSelection().size != 0);
+ if (buttonsEnabled == enable) return;
+ buttonsEnabled = enable;
+ for (const id of ['focus', 'ignore', 'hide', 'show', 'show-from']) {
+ const link = document.getElementById(id);
+ if (link != null) {
+ link.classList.toggle('disabled', !enable);
+ }
+ }
+ }
+
+ // Initialize button states
+ updateButtons();
+
+ // Setup event handlers
+ initMenus();
+ if (svg != null) {
+ initPanAndZoom(svg, toggleSvgSelect);
+ }
+ if (toptable != null) {
+ toptable.addEventListener('mousedown', handleTopClick);
+ toptable.addEventListener('touchstart', handleTopClick);
+ }
+
+ const ids = ['topbtn', 'graphbtn',
+ 'flamegraph', 'flamegraph2', 'flamegraphold',
+ 'peek', 'list',
+ 'disasm', 'focus', 'ignore', 'hide', 'show', 'show-from'];
+ ids.forEach(makeSearchLinkDynamic);
+
+ const sampleIDs = [{{range .SampleTypes}}'{{.}}', {{end}}];
+ sampleIDs.forEach(setSampleIndexLink);
+
+ // Bind action to button with specified id.
+ function addAction(id, action) {
+ const btn = document.getElementById(id);
+ if (btn != null) {
+ btn.addEventListener('click', action);
+ btn.addEventListener('touchstart', action);
+ }
+ }
+
+ addAction('details', handleDetails);
+ initConfigManager();
+
+ search.addEventListener('input', handleSearch);
+ search.addEventListener('keydown', handleKey);
+
+ // Give initial focus to main container so it can be scrolled using keys.
+ const main = document.getElementById('bodycontainer');
+ if (main) {
+ main.focus();
+ }
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/driver/html/flamegraph.html b/src/cmd/vendor/github.com/google/pprof/internal/driver/html/flamegraph.html
new file mode 100644
index 0000000..9866755
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/driver/html/flamegraph.html
@@ -0,0 +1,103 @@
+<!DOCTYPE html>
+<html>
+<head>
+ <meta charset="utf-8">
+ <title>{{.Title}}</title>
+ {{template "css" .}}
+ <style type="text/css">{{template "d3flamegraphcss" .}}</style>
+ <style type="text/css">
+ .flamegraph-content {
+ width: 90%;
+ min-width: 80%;
+ margin-left: 5%;
+ }
+ .flamegraph-details {
+ height: 1.2em;
+ width: 90%;
+ min-width: 90%;
+ margin-left: 5%;
+ padding: 15px 0 35px;
+ }
+ </style>
+</head>
+<body>
+ {{template "header" .}}
+ <div id="bodycontainer">
+ <div id="flamegraphdetails" class="flamegraph-details"></div>
+ <div class="flamegraph-content">
+ <div id="chart"></div>
+ </div>
+ </div>
+ {{template "script" .}}
+ <script>viewer(new URL(window.location.href), {{.Nodes}});</script>
+ <script>{{template "d3flamegraphscript" .}}</script>
+ <script>
+ {{- /* Deserialize as JSON instead of a JS object literal because the browser's
+ JSON parser can handle larger payloads than its JS parser. */ -}}
+ var data = JSON.parse("{{.FlameGraph}}");
+
+ var width = document.getElementById('chart').clientWidth;
+
+ var flameGraph = flamegraph()
+ .width(width)
+ .cellHeight(18)
+ .minFrameSize(1)
+ .transitionDuration(750)
+ .inverted(true)
+ .sort(true)
+ .title('')
+ .tooltip(false)
+ .setDetailsElement(document.getElementById('flamegraphdetails'));
+
+ // <full name> (percentage, value)
+ flameGraph.label((d) => d.data.f + ' (' + d.data.p + ', ' + d.data.l + ')');
+
+ flameGraph.setColorHue('warm');
+
+ select('#chart')
+ .datum(data)
+ .call(flameGraph);
+
+ function clear() {
+ flameGraph.clear();
+ }
+
+ function resetZoom() {
+ flameGraph.resetZoom();
+ }
+
+ window.addEventListener('resize', function() {
+ var width = document.getElementById('chart').clientWidth;
+ var graphs = document.getElementsByClassName('d3-flame-graph');
+ if (graphs.length > 0) {
+ graphs[0].setAttribute('width', width);
+ }
+ flameGraph.width(width);
+ flameGraph.resetZoom();
+ }, true);
+
+ var search = document.getElementById('search');
+ var searchAlarm = null;
+
+ function selectMatching() {
+ searchAlarm = null;
+
+ if (search.value != '') {
+ flameGraph.search(search.value);
+ } else {
+ flameGraph.clear();
+ }
+ }
+
+ function handleSearch() {
+ // Delay expensive processing so a flurry of key strokes is handled once.
+ if (searchAlarm != null) {
+ clearTimeout(searchAlarm);
+ }
+ searchAlarm = setTimeout(selectMatching, 300);
+ }
+
+ search.addEventListener('input', handleSearch);
+ </script>
+</body>
+</html>
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/driver/html/graph.html b/src/cmd/vendor/github.com/google/pprof/internal/driver/html/graph.html
new file mode 100644
index 0000000..a113549
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/driver/html/graph.html
@@ -0,0 +1,16 @@
+<!DOCTYPE html>
+<html>
+<head>
+ <meta charset="utf-8">
+ <title>{{.Title}}</title>
+ {{template "css" .}}
+</head>
+<body>
+ {{template "header" .}}
+ <div id="graph">
+ {{.HTMLBody}}
+ </div>
+ {{template "script" .}}
+ <script>viewer(new URL(window.location.href), {{.Nodes}});</script>
+</body>
+</html>
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/driver/html/header.html b/src/cmd/vendor/github.com/google/pprof/internal/driver/html/header.html
new file mode 100644
index 0000000..42cb796
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/driver/html/header.html
@@ -0,0 +1,114 @@
+<div class="header">
+ <div class="title">
+ <h1><a href="./">pprof</a></h1>
+ </div>
+
+ <div id="view" class="menu-item">
+ <div class="menu-name">
+ View
+ <i class="downArrow"></i>
+ </div>
+ <div class="submenu">
+ <a title="{{.Help.top}}" href="./top" id="topbtn">Top</a>
+ <a title="{{.Help.graph}}" href="./" id="graphbtn">Graph</a>
+ <a title="{{.Help.flamegraph}}" href="./flamegraph" id="flamegraph">Flame Graph</a>
+ <a title="{{.Help.flamegraphold}}" href="./flamegraphold" id="flamegraphold">Flame Graph (old)</a>
+ <a title="{{.Help.peek}}" href="./peek" id="peek">Peek</a>
+ <a title="{{.Help.list}}" href="./source" id="list">Source</a>
+ <a title="{{.Help.disasm}}" href="./disasm" id="disasm">Disassemble</a>
+ </div>
+ </div>
+
+ {{$sampleLen := len .SampleTypes}}
+ {{if gt $sampleLen 1}}
+ <div id="sample" class="menu-item">
+ <div class="menu-name">
+ Sample
+ <i class="downArrow"></i>
+ </div>
+ <div class="submenu">
+ {{range .SampleTypes}}
+ <a href="?si={{.}}" id="sampletype-{{.}}">{{.}}</a>
+ {{end}}
+ </div>
+ </div>
+ {{end}}
+
+ <div id="refine" class="menu-item">
+ <div class="menu-name">
+ Refine
+ <i class="downArrow"></i>
+ </div>
+ <div class="submenu">
+ <a title="{{.Help.focus}}" href="?" id="focus">Focus</a>
+ <a title="{{.Help.ignore}}" href="?" id="ignore">Ignore</a>
+ <a title="{{.Help.hide}}" href="?" id="hide">Hide</a>
+ <a title="{{.Help.show}}" href="?" id="show">Show</a>
+ <a title="{{.Help.show_from}}" href="?" id="show-from">Show from</a>
+ <hr>
+ <a title="{{.Help.reset}}" href="?">Reset</a>
+ </div>
+ </div>
+
+ <div id="config" class="menu-item">
+ <div class="menu-name">
+ Config
+ <i class="downArrow"></i>
+ </div>
+ <div class="submenu">
+ <a title="{{.Help.save_config}}" id="save-config">Save as ...</a>
+ <hr>
+ {{range .Configs}}
+ <a href="{{.URL}}">
+ {{if .Current}}<span class="menu-check-mark">✓</span>{{end}}
+ {{.Name}}
+ {{if .UserConfig}}<span class="menu-delete-btn" data-config={{.Name}}>🗙</span>{{end}}
+ </a>
+ {{end}}
+ </div>
+ </div>
+
+ <div id="download" class="menu-item">
+ <div class="menu-name">
+ <a href="./download">Download</a>
+ </div>
+ </div>
+
+ <div>
+ <input id="search" type="text" placeholder="Search regexp" autocomplete="off" autocapitalize="none" size=40>
+ </div>
+
+ <div class="description">
+ <a title="{{.Help.details}}" href="#" id="details">{{.Title}}</a>
+ <div id="detailsbox">
+ {{range .Legend}}<div>{{.}}</div>{{end}}
+ </div>
+ </div>
+</div>
+
+<div id="dialog-overlay"></div>
+
+<div class="dialog" id="save-dialog">
+ <div class="dialog-header">Save options as</div>
+ <datalist id="config-list">
+ {{range .Configs}}{{if .UserConfig}}<option value="{{.Name}}" />{{end}}{{end}}
+ </datalist>
+ <input id="save-name" type="text" list="config-list" placeholder="New config" />
+ <div class="dialog-footer">
+ <span class="dialog-error" id="save-error"></span>
+ <button id="save-cancel">Cancel</button>
+ <button id="save-confirm">Save</button>
+ </div>
+</div>
+
+<div class="dialog" id="delete-dialog">
+ <div class="dialog-header" id="delete-dialog-title">Delete config</div>
+ <div id="delete-prompt"></div>
+ <div class="dialog-footer">
+ <span class="dialog-error" id="delete-error"></span>
+ <button id="delete-cancel">Cancel</button>
+ <button id="delete-confirm">Delete</button>
+ </div>
+</div>
+
+<div id="errors">{{range .Errors}}<div>{{.}}</div>{{end}}</div>
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/driver/html/plaintext.html b/src/cmd/vendor/github.com/google/pprof/internal/driver/html/plaintext.html
new file mode 100644
index 0000000..9791cc7
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/driver/html/plaintext.html
@@ -0,0 +1,18 @@
+<!DOCTYPE html>
+<html>
+<head>
+ <meta charset="utf-8">
+ <title>{{.Title}}</title>
+ {{template "css" .}}
+</head>
+<body>
+ {{template "header" .}}
+ <div id="content">
+ <pre>
+ {{.TextBody}}
+ </pre>
+ </div>
+ {{template "script" .}}
+ <script>viewer(new URL(window.location.href), null);</script>
+</body>
+</html>
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/driver/html/source.html b/src/cmd/vendor/github.com/google/pprof/internal/driver/html/source.html
new file mode 100644
index 0000000..3212bee
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/driver/html/source.html
@@ -0,0 +1,18 @@
+<!DOCTYPE html>
+<html>
+<head>
+ <meta charset="utf-8">
+ <title>{{.Title}}</title>
+ {{template "css" .}}
+ {{template "weblistcss" .}}
+ {{template "weblistjs" .}}
+</head>
+<body>
+ {{template "header" .}}
+ <div id="content" class="source">
+ {{.HTMLBody}}
+ </div>
+ {{template "script" .}}
+ <script>viewer(new URL(window.location.href), null);</script>
+</body>
+</html>
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/driver/html/stacks.css b/src/cmd/vendor/github.com/google/pprof/internal/driver/html/stacks.css
new file mode 100644
index 0000000..f5aeb98
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/driver/html/stacks.css
@@ -0,0 +1,78 @@
+body {
+ overflow: hidden; /* Want scrollbar not here, but in #stack-holder */
+}
+/* Scrollable container for flame graph */
+#stack-holder {
+ width: 100%;
+ flex-grow: 1;
+ overflow-y: auto;
+ background: #eee; /* Light grey gives better contrast with boxes */
+ position: relative; /* Allows absolute positioning of child boxes */
+}
+/* Flame graph */
+#stack-chart {
+ width: 100%;
+ position: relative; /* Allows absolute positioning of child boxes */
+}
+/* Shows details of frame that is under the mouse */
+#current-details {
+ position: absolute;
+ top: 5px;
+ right: 5px;
+ z-index: 2;
+ font-size: 12pt;
+}
+/* Background of a single flame-graph frame */
+.boxbg {
+ border-width: 0px;
+ position: absolute;
+ overflow: hidden;
+ box-sizing: border-box;
+ background: #d8d8d8;
+}
+.positive { position: absolute; background: #caa; }
+.negative { position: absolute; background: #aca; }
+/* Not-inlined frames are visually separated from their caller. */
+.not-inlined {
+ border-top: 1px solid black;
+}
+/* Function name */
+.boxtext {
+ position: absolute;
+ width: 100%;
+ padding-left: 2px;
+ line-height: 18px;
+ cursor: default;
+ font-family: "Google Sans", Arial, sans-serif;
+ font-size: 12pt;
+ z-index: 2;
+}
+/* Box highlighting via shadows to avoid size changes */
+.hilite { box-shadow: 0px 0px 0px 2px #000; z-index: 1; }
+.hilite2 { box-shadow: 0px 0px 0px 2px #000; z-index: 1; }
+/* Gap left between callers and callees */
+.separator {
+ position: absolute;
+ text-align: center;
+ font-size: 12pt;
+ font-weight: bold;
+}
+/* Ensure that pprof menu is above boxes */
+.submenu { z-index: 3; }
+/* Right-click menu */
+#action-menu {
+ max-width: 15em;
+}
+/* Right-click menu title */
+#action-title {
+ display: block;
+ padding: 0.5em 1em;
+ background: #888;
+ text-overflow: ellipsis;
+ overflow: hidden;
+}
+/* Internal canvas used to measure text size when picking fonts */
+#textsizer {
+ position: absolute;
+ bottom: -100px;
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/driver/html/stacks.html b/src/cmd/vendor/github.com/google/pprof/internal/driver/html/stacks.html
new file mode 100644
index 0000000..1ddb7a3
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/driver/html/stacks.html
@@ -0,0 +1,32 @@
+<!DOCTYPE html>
+<html>
+<head>
+ <meta charset="utf-8">
+ <title>{{.Title}}</title>
+ {{template "css" .}}
+ {{template "stacks_css"}}
+</head>
+<body>
+ {{template "header" .}}
+ <div id="stack-holder">
+ <div id="stack-chart"></div>
+ <div id="current-details"></div>
+ </div>
+ <div id="action-menu" class="submenu">
+ <span id="action-title"></span>
+ <hr>
+ <a title="{{.Help.list}}" id="action-source" href="./source">Show source code</a>
+ <a title="{{.Help.list}}" id="action-source-tab" href="./source" target="_blank">Show source in new tab</a>
+ <hr>
+ <a title="{{.Help.focus}}" id="action-focus" href="?">Focus</a>
+ <a title="{{.Help.ignore}}" id="action-ignore" href="?">Ignore</a>
+ <a title="{{.Help.hide}}" id="action-hide" href="?">Hide</a>
+ <a title="{{.Help.show_from}}" id="action-showfrom" href="?">Show from</a>
+ </div>
+ {{template "script" .}}
+ {{template "stacks_js"}}
+ <script>
+ stackViewer({{.Stacks}}, {{.Nodes}});
+ </script>
+</body>
+</html>
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/driver/html/stacks.js b/src/cmd/vendor/github.com/google/pprof/internal/driver/html/stacks.js
new file mode 100644
index 0000000..be78edd
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/driver/html/stacks.js
@@ -0,0 +1,600 @@
+// stackViewer displays a flame-graph like view (extended to show callers).
+// stacks - report.StackSet
+// nodes - List of names for each source in report.StackSet
+function stackViewer(stacks, nodes) {
+ 'use strict';
+
+ // Constants used in rendering.
+ const ROW = 20;
+ const PADDING = 2;
+ const MIN_WIDTH = 4;
+ const MIN_TEXT_WIDTH = 16;
+ const TEXT_MARGIN = 2;
+ const FONT_SIZE = 12;
+ const MIN_FONT_SIZE = 8;
+
+ // Mapping from unit to a list of display scales/labels.
+ // List should be ordered by increasing unit size.
+ const UNITS = new Map([
+ ['B', [
+ ['B', 1],
+ ['kB', Math.pow(2, 10)],
+ ['MB', Math.pow(2, 20)],
+ ['GB', Math.pow(2, 30)],
+ ['TB', Math.pow(2, 40)],
+ ['PB', Math.pow(2, 50)]]],
+ ['s', [
+ ['ns', 1e-9],
+ ['µs', 1e-6],
+ ['ms', 1e-3],
+ ['s', 1],
+ ['hrs', 60*60]]]]);
+
+ // Fields
+ let pivots = []; // Indices of currently selected data.Sources entries.
+ let matches = new Set(); // Indices of sources that match search
+ let elems = new Map(); // Mapping from source index to display elements
+ let displayList = []; // List of boxes to display.
+ let actionMenuOn = false; // Is action menu visible?
+ let actionTarget = null; // Box on which action menu is operating.
+ let diff = false; // Are we displaying a diff?
+
+ for (const stack of stacks.Stacks) {
+ if (stack.Value < 0) {
+ diff = true;
+ break;
+ }
+ }
+
+ // Setup to allow measuring text width.
+ const textSizer = document.createElement('canvas');
+ textSizer.id = 'textsizer';
+ const textContext = textSizer.getContext('2d');
+
+ // Get DOM elements.
+ const chart = find('stack-chart');
+ const search = find('search');
+ const actions = find('action-menu');
+ const actionTitle = find('action-title');
+ const detailBox = find('current-details');
+
+ window.addEventListener('resize', render);
+ window.addEventListener('popstate', render);
+ search.addEventListener('keydown', handleSearchKey);
+
+ // Withdraw action menu when clicking outside, or when item selected.
+ document.addEventListener('mousedown', (e) => {
+ if (!actions.contains(e.target)) {
+ hideActionMenu();
+ }
+ });
+ actions.addEventListener('click', hideActionMenu);
+
+ // Initialize menus and other general UI elements.
+ viewer(new URL(window.location.href), nodes, {
+ hiliter: (n, on) => { return hilite(n, on); },
+ current: () => {
+ let r = new Map();
+ for (let p of pivots) {
+ r.set(p, true);
+ }
+ return r;
+ }});
+
+ render();
+
+ // Helper functions follow:
+
+ // hilite changes the highlighting of elements corresponding to specified src.
+ function hilite(src, on) {
+ if (on) {
+ matches.add(src);
+ } else {
+ matches.delete(src);
+ }
+ toggleClass(src, 'hilite', on);
+ return true;
+ }
+
+ // Display action menu (triggered by right-click on a frame)
+ function showActionMenu(e, box) {
+ if (box.src == 0) return; // No action menu for root
+ e.preventDefault(); // Disable browser context menu
+ const src = stacks.Sources[box.src];
+ actionTitle.innerText = src.Display[src.Display.length-1];
+ const menu = actions;
+ menu.style.display = 'block';
+ // Compute position so menu stays visible and near the mouse.
+ const x = Math.min(e.clientX - 10, document.body.clientWidth - menu.clientWidth);
+ const y = Math.min(e.clientY - 10, document.body.clientHeight - menu.clientHeight);
+ menu.style.left = x + 'px';
+ menu.style.top = y + 'px';
+ // Set menu links to operate on clicked box.
+ setHrefParam('action-source', 'f', box.src);
+ setHrefParam('action-source-tab', 'f', box.src);
+ setHrefParam('action-focus', 'f', box.src);
+ setHrefParam('action-ignore', 'i', box.src);
+ setHrefParam('action-hide', 'h', box.src);
+ setHrefParam('action-showfrom', 'sf', box.src);
+ toggleClass(box.src, 'hilite2', true);
+ actionTarget = box;
+ actionMenuOn = true;
+ }
+
+ function hideActionMenu() {
+ actions.style.display = 'none';
+ actionMenuOn = false;
+ if (actionTarget != null) {
+ toggleClass(actionTarget.src, 'hilite2', false);
+ }
+ }
+
+ // setHrefParam updates the specified parameter in the href of an <a>
+ // element to make it operate on the specified src.
+ function setHrefParam(id, param, src) {
+ const elem = document.getElementById(id);
+ if (!elem) return;
+
+ let url = new URL(elem.href);
+ url.hash = '';
+
+ // Copy params from this page's URL.
+ const params = url.searchParams;
+ for (const p of new URLSearchParams(window.location.search)) {
+ params.set(p[0], p[1]);
+ }
+
+ // Update params to include src.
+ let v = stacks.Sources[src].RE;
+ if (param != 'f' && param != 'sf') { // old f,sf values are overwritten
+ // Add new source to current parameter value.
+ const old = params.get(param);
+ if (old && old != '') {
+ v += '|' + old;
+ }
+ }
+ params.set(param, v);
+
+ elem.href = url.toString();
+ }
+
+ // Capture Enter key in the search box to make it pivot instead of focus.
+ function handleSearchKey(e) {
+ if (e.key != 'Enter') return;
+ e.stopImmediatePropagation(); // Disable normal enter key handling
+ const val = search.value;
+ try {
+ new RegExp(search.value);
+ } catch (error) {
+ return; // TODO: Display error state in search box
+ }
+ switchPivots(val);
+ }
+
+ function switchPivots(regexp) {
+ // Switch URL without hitting the server.
+ const url = new URL(document.URL);
+ url.searchParams.set('p', regexp);
+ history.pushState('', '', url.toString()); // Makes back-button work
+ matches = new Set();
+ search.value = '';
+ render();
+ }
+
+ function handleEnter(box, div) {
+ if (actionMenuOn) return;
+ const src = stacks.Sources[box.src];
+ div.title = details(box) + ' │ ' + src.FullName + (src.Inlined ? "\n(inlined)" : "");
+ detailBox.innerText = summary(box.sumpos, box.sumneg);
+ // Highlight all boxes that have the same source as box.
+ toggleClass(box.src, 'hilite2', true);
+ }
+
+ function handleLeave(box) {
+ if (actionMenuOn) return;
+ detailBox.innerText = '';
+ toggleClass(box.src, 'hilite2', false);
+ }
+
+ // Return list of sources that match the regexp given by the 'p' URL parameter.
+ function urlPivots() {
+ const pivots = [];
+ const params = (new URL(document.URL)).searchParams;
+ const val = params.get('p');
+ if (val !== null && val != '') {
+ try {
+ const re = new RegExp(val);
+ for (let i = 0; i < stacks.Sources.length; i++) {
+ const src = stacks.Sources[i];
+ if (re.test(src.UniqueName) || re.test(src.FileName)) {
+ pivots.push(i);
+ }
+ }
+ } catch (error) {}
+ }
+ if (pivots.length == 0) {
+ pivots.push(0);
+ }
+ return pivots;
+ }
+
+ // render re-generates the stack display.
+ function render() {
+ pivots = urlPivots();
+
+ // Get places where pivots occur.
+ let places = [];
+ for (let pivot of pivots) {
+ const src = stacks.Sources[pivot];
+ for (let p of src.Places) {
+ places.push(p);
+ }
+ }
+
+ const width = chart.clientWidth;
+ elems.clear();
+ actionTarget = null;
+ const [pos, neg] = totalValue(places);
+ const total = pos + neg;
+ const xscale = (width-2*PADDING) / total; // Converts from profile value to X pixels
+ const x = PADDING;
+ const y = 0;
+
+ displayList.length = 0;
+ renderStacks(0, xscale, x, y, places, +1); // Callees
+ renderStacks(0, xscale, x, y-ROW, places, -1); // Callers (ROW left for separator)
+ display(xscale, pos, neg, displayList);
+ }
+
+ // renderStacks creates boxes with top-left at x,y with children drawn as
+ // nested stacks (below or above based on the sign of direction).
+ // Returns the largest y coordinate filled.
+ function renderStacks(depth, xscale, x, y, places, direction) {
+ // Example: suppose we are drawing the following stacks:
+ // a->b->c
+ // a->b->d
+ // a->e->f
+ // After rendering a, we will call renderStacks, with places pointing to
+ // the preceding stacks.
+ //
+ // We first group all places with the same leading entry. In this example
+ // we get [b->c, b->d] and [e->f]. We render the two groups side-by-side.
+ const groups = partitionPlaces(places);
+ for (const g of groups) {
+ renderGroup(depth, xscale, x, y, g, direction);
+ x += groupWidth(xscale, g);
+ }
+ }
+
+ // Some of the types used below:
+ //
+ // // Group represents a displayed (sub)tree.
+ // interface Group {
+ // name: string; // Full name of source
+ // src: number; // Index in stacks.Sources
+ // self: number; // Contribution as leaf (may be < 0 for diffs)
+ // sumpos: number; // Sum of |self| of positive nodes in tree (>= 0)
+ // sumneg: number; // Sum of |self| of negative nodes in tree (>= 0)
+ // places: Place[]; // Stack slots that contributed to this group
+ // }
+ //
+ // // Box is a rendered item.
+ // interface Box {
+ // x: number; // X coordinate of top-left
+ // y: number; // Y coordinate of top-left
+ // width: number; // Width of box to display
+ // src: number; // Index in stacks.Sources
+ // sumpos: number; // From corresponding Group
+ // sumneg: number; // From corresponding Group
+ // self: number; // From corresponding Group
+ // };
+
+ function groupWidth(xscale, g) {
+ return xscale * (g.sumpos + g.sumneg);
+ }
+
+ function renderGroup(depth, xscale, x, y, g, direction) {
+ // Skip if not wide enough.
+ const width = groupWidth(xscale, g);
+ if (width < MIN_WIDTH) return;
+
+ // Draw the box for g.src (except for selected element in upwards direction
+ // since that duplicates the box we added in downwards direction).
+ if (depth != 0 || direction > 0) {
+ const box = {
+ x: x,
+ y: y,
+ width: width,
+ src: g.src,
+ sumpos: g.sumpos,
+ sumneg: g.sumneg,
+ self: g.self,
+ };
+ displayList.push(box);
+ if (direction > 0) {
+ // Leave gap on left hand side to indicate self contribution.
+ x += xscale*Math.abs(g.self);
+ }
+ }
+ y += direction * ROW;
+
+ // Find child or parent stacks.
+ const next = [];
+ for (const place of g.places) {
+ const stack = stacks.Stacks[place.Stack];
+ const nextSlot = place.Pos + direction;
+ if (nextSlot >= 0 && nextSlot < stack.Sources.length) {
+ next.push({Stack: place.Stack, Pos: nextSlot});
+ }
+ }
+ renderStacks(depth+1, xscale, x, y, next, direction);
+ }
+
+ // partitionPlaces partitions a set of places into groups where each group
+ // contains places with the same source. If a stack occurs multiple times
+ // in places, only the outer-most occurrence is kept.
+ function partitionPlaces(places) {
+ // Find outer-most slot per stack (used later to elide duplicate stacks).
+ const stackMap = new Map(); // Map from stack index to outer-most slot#
+ for (const place of places) {
+ const prevSlot = stackMap.get(place.Stack);
+ if (prevSlot && prevSlot <= place.Pos) {
+ // We already have a higher slot in this stack.
+ } else {
+ stackMap.set(place.Stack, place.Pos);
+ }
+ }
+
+ // Now partition the stacks.
+ const groups = []; // Array of Group {name, src, sum, self, places}
+ const groupMap = new Map(); // Map from Source to Group
+ for (const place of places) {
+ if (stackMap.get(place.Stack) != place.Pos) {
+ continue;
+ }
+
+ const stack = stacks.Stacks[place.Stack];
+ const src = stack.Sources[place.Pos];
+ let group = groupMap.get(src);
+ if (!group) {
+ const name = stacks.Sources[src].FullName;
+ group = {name: name, src: src, sumpos: 0, sumneg: 0, self: 0, places: []};
+ groupMap.set(src, group);
+ groups.push(group);
+ }
+ if (stack.Value < 0) {
+ group.sumneg += -stack.Value;
+ } else {
+ group.sumpos += stack.Value;
+ }
+ group.self += (place.Pos == stack.Sources.length-1) ? stack.Value : 0;
+ group.places.push(place);
+ }
+
+ // Order by decreasing cost (makes it easier to spot heavy functions).
+ // Though alphabetical ordering is a potential alternative that will make
+ // profile comparisons easier.
+ groups.sort(function(a, b) {
+ return (b.sumpos + b.sumneg) - (a.sumpos + a.sumneg);
+ });
+
+ return groups;
+ }
+
+ function display(xscale, posTotal, negTotal, list) {
+ // Sort boxes so that text selection follows a predictable order.
+ list.sort(function(a, b) {
+ if (a.y != b.y) return a.y - b.y;
+ return a.x - b.x;
+ });
+
+ // Adjust Y coordinates so that zero is at top.
+ let adjust = (list.length > 0) ? list[0].y : 0;
+ adjust -= ROW + 2*PADDING; // Room for details
+
+ const divs = [];
+ for (const box of list) {
+ box.y -= adjust;
+ divs.push(drawBox(xscale, box));
+ }
+ divs.push(drawSep(-adjust, posTotal, negTotal));
+
+ const h = (list.length > 0 ? list[list.length-1].y : 0) + 4*ROW;
+ chart.style.height = h+'px';
+ chart.replaceChildren(...divs);
+ }
+
+ function drawBox(xscale, box) {
+ const srcIndex = box.src;
+ const src = stacks.Sources[srcIndex];
+
+ function makeRect(cl, x, y, w, h) {
+ const r = document.createElement('div');
+ r.style.left = x+'px';
+ r.style.top = y+'px';
+ r.style.width = w+'px';
+ r.style.height = h+'px';
+ r.classList.add(cl);
+ return r;
+ }
+
+ // Background
+ const w = box.width - 1; // Leave 1px gap
+ const r = makeRect('boxbg', box.x, box.y, w, ROW);
+ if (!diff) r.style.background = makeColor(src.Color);
+ addElem(srcIndex, r);
+ if (!src.Inlined) {
+ r.classList.add('not-inlined');
+ }
+
+ // Positive/negative indicator for diff mode.
+ if (diff) {
+ const delta = box.sumpos - box.sumneg;
+ const partWidth = xscale * Math.abs(delta);
+ if (partWidth >= MIN_WIDTH) {
+ r.appendChild(makeRect((delta < 0 ? 'negative' : 'positive'),
+ 0, 0, partWidth, ROW-1));
+ }
+ }
+
+ // Label
+ if (box.width >= MIN_TEXT_WIDTH) {
+ const t = document.createElement('div');
+ t.classList.add('boxtext');
+ fitText(t, box.width-2*TEXT_MARGIN, src.Display);
+ r.appendChild(t);
+ }
+
+ r.addEventListener('click', () => { switchPivots(src.RE); });
+ r.addEventListener('mouseenter', () => { handleEnter(box, r); });
+ r.addEventListener('mouseleave', () => { handleLeave(box); });
+ r.addEventListener('contextmenu', (e) => { showActionMenu(e, box); });
+ return r;
+ }
+
+ function drawSep(y, posTotal, negTotal) {
+ const m = document.createElement('div');
+ m.innerText = summary(posTotal, negTotal);
+ m.style.top = (y-ROW) + 'px';
+ m.style.left = PADDING + 'px';
+ m.style.width = (chart.clientWidth - PADDING*2) + 'px';
+ m.classList.add('separator');
+ return m;
+ }
+
+ // addElem registers an element that belongs to the specified src.
+ function addElem(src, elem) {
+ let list = elems.get(src);
+ if (!list) {
+ list = [];
+ elems.set(src, list);
+ }
+ list.push(elem);
+ elem.classList.toggle('hilite', matches.has(src));
+ }
+
+ // Adds or removes cl from classList of all elements for the specified source.
+ function toggleClass(src, cl, value) {
+ const list = elems.get(src);
+ if (list) {
+ for (const elem of list) {
+ elem.classList.toggle(cl, value);
+ }
+ }
+ }
+
+ // fitText sets text and font-size clipped to the specified width w.
+ function fitText(t, avail, textList) {
+ // Find first entry in textList that fits.
+ let width = avail;
+ textContext.font = FONT_SIZE + 'pt Arial';
+ for (let i = 0; i < textList.length; i++) {
+ let text = textList[i];
+ width = textContext.measureText(text).width;
+ if (width <= avail) {
+ t.innerText = text;
+ return;
+ }
+ }
+
+ // Try to fit by dropping font size.
+ let text = textList[textList.length-1];
+ const fs = Math.max(MIN_FONT_SIZE, FONT_SIZE * (avail / width));
+ t.style.fontSize = fs + 'pt';
+ t.innerText = text;
+ }
+
+ // totalValue returns the positive and negative sums of the Values of stacks
+ // listed in places.
+ function totalValue(places) {
+ const seen = new Set();
+ let pos = 0;
+ let neg = 0;
+ for (const place of places) {
+ if (seen.has(place.Stack)) continue; // Do not double-count stacks
+ seen.add(place.Stack);
+ const stack = stacks.Stacks[place.Stack];
+ if (stack.Value < 0) {
+ neg += -stack.Value;
+ } else {
+ pos += stack.Value;
+ }
+ }
+ return [pos, neg];
+ }
+
+ function summary(pos, neg) {
+ // Examples:
+ // 6s (10%)
+ // 12s (20%) 🠆 18s (30%)
+ return diff ? diffText(neg, pos) : percentText(pos);
+ }
+
+ function details(box) {
+ // Examples:
+ // 6s (10%)
+ // 6s (10%) │ self 3s (5%)
+ // 6s (10%) │ 12s (20%) 🠆 18s (30%)
+ let result = percentText(box.sumpos - box.sumneg);
+ if (box.self != 0) {
+ result += " │ self " + unitText(box.self);
+ }
+ if (diff && box.sumpos > 0 && box.sumneg > 0) {
+ result += " │ " + diffText(box.sumneg, box.sumpos);
+ }
+ return result;
+ }
+
+ // diffText returns text that displays from and to alongside their percentages.
+ // E.g., 9s (45%) 🠆 10s (50%)
+ function diffText(from, to) {
+ return percentText(from) + " 🠆 " + percentText(to);
+ }
+
+ // percentText returns text that displays v in appropriate units alongside its
+ // percentange.
+ function percentText(v) {
+ function percent(v, total) {
+ return Number(((100.0 * v) / total).toFixed(1)) + '%';
+ }
+ return unitText(v) + " (" + percent(v, stacks.Total) + ")";
+ }
+
+ // unitText returns a formatted string to display for value.
+ function unitText(value) {
+ const sign = (value < 0) ? "-" : "";
+ let v = Math.abs(value) * stacks.Scale;
+ // Rescale to appropriate display unit.
+ let unit = stacks.Unit;
+ const list = UNITS.get(unit);
+ if (list) {
+ // Find first entry in list that is not too small.
+ for (const [name, scale] of list) {
+ if (v <= 100*scale) {
+ v /= scale;
+ unit = name;
+ break;
+ }
+ }
+ }
+ return sign + Number(v.toFixed(2)) + unit;
+ }
+
+ function find(name) {
+ const elem = document.getElementById(name);
+ if (!elem) {
+ throw 'element not found: ' + name
+ }
+ return elem;
+ }
+
+ function makeColor(index) {
+ // Rotate hue around a circle. Multiple by phi to spread things
+ // out better. Use 50% saturation to make subdued colors, and
+ // 80% lightness to have good contrast with black foreground text.
+ const PHI = 1.618033988;
+ const hue = (index+1) * PHI * 2 * Math.PI; // +1 to avoid 0
+ const hsl = `hsl(${hue}rad 50% 80%)`;
+ return hsl;
+ }
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/driver/html/top.html b/src/cmd/vendor/github.com/google/pprof/internal/driver/html/top.html
new file mode 100644
index 0000000..86d9fcb
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/driver/html/top.html
@@ -0,0 +1,114 @@
+<!DOCTYPE html>
+<html>
+<head>
+ <meta charset="utf-8">
+ <title>{{.Title}}</title>
+ {{template "css" .}}
+ <style type="text/css">
+ </style>
+</head>
+<body>
+ {{template "header" .}}
+ <div id="top">
+ <table id="toptable">
+ <thead>
+ <tr>
+ <th id="flathdr1">Flat</th>
+ <th id="flathdr2">Flat%</th>
+ <th>Sum%</th>
+ <th id="cumhdr1">Cum</th>
+ <th id="cumhdr2">Cum%</th>
+ <th id="namehdr">Name</th>
+ <th>Inlined?</th>
+ </tr>
+ </thead>
+ <tbody id="rows"></tbody>
+ </table>
+ </div>
+ {{template "script" .}}
+ <script>
+ function makeTopTable(total, entries) {
+ const rows = document.getElementById('rows');
+ if (rows == null) return;
+
+ // Store initial index in each entry so we have stable node ids for selection.
+ for (let i = 0; i < entries.length; i++) {
+ entries[i].Id = 'node' + i;
+ }
+
+ // Which column are we currently sorted by and in what order?
+ let currentColumn = '';
+ let descending = false;
+ sortBy('Flat');
+
+ function sortBy(column) {
+ // Update sort criteria
+ if (column == currentColumn) {
+ descending = !descending; // Reverse order
+ } else {
+ currentColumn = column;
+ descending = (column != 'Name');
+ }
+
+ // Sort according to current criteria.
+ function cmp(a, b) {
+ const av = a[currentColumn];
+ const bv = b[currentColumn];
+ if (av < bv) return -1;
+ if (av > bv) return +1;
+ return 0;
+ }
+ entries.sort(cmp);
+ if (descending) entries.reverse();
+
+ function addCell(tr, val) {
+ const td = document.createElement('td');
+ td.textContent = val;
+ tr.appendChild(td);
+ }
+
+ function percent(v) {
+ return (v * 100.0 / total).toFixed(2) + '%';
+ }
+
+ // Generate rows
+ const fragment = document.createDocumentFragment();
+ let sum = 0;
+ for (const row of entries) {
+ const tr = document.createElement('tr');
+ tr.id = row.Id;
+ sum += row.Flat;
+ addCell(tr, row.FlatFormat);
+ addCell(tr, percent(row.Flat));
+ addCell(tr, percent(sum));
+ addCell(tr, row.CumFormat);
+ addCell(tr, percent(row.Cum));
+ addCell(tr, row.Name);
+ addCell(tr, row.InlineLabel);
+ fragment.appendChild(tr);
+ }
+
+ rows.textContent = ''; // Remove old rows
+ rows.appendChild(fragment);
+ }
+
+ // Make different column headers trigger sorting.
+ function bindSort(id, column) {
+ const hdr = document.getElementById(id);
+ if (hdr == null) return;
+ const fn = function() { sortBy(column) };
+ hdr.addEventListener('click', fn);
+ hdr.addEventListener('touch', fn);
+ }
+ bindSort('flathdr1', 'Flat');
+ bindSort('flathdr2', 'Flat');
+ bindSort('cumhdr1', 'Cum');
+ bindSort('cumhdr2', 'Cum');
+ bindSort('namehdr', 'Name');
+ }
+
+ viewer(new URL(window.location.href), {{.Nodes}});
+ makeTopTable({{.Total}}, {{.Top}});
+ </script>
+</body>
+</html>
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/driver/interactive.go b/src/cmd/vendor/github.com/google/pprof/internal/driver/interactive.go
new file mode 100644
index 0000000..e6e865f
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/driver/interactive.go
@@ -0,0 +1,419 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package driver
+
+import (
+ "fmt"
+ "io"
+ "regexp"
+ "sort"
+ "strconv"
+ "strings"
+
+ "github.com/google/pprof/internal/plugin"
+ "github.com/google/pprof/internal/report"
+ "github.com/google/pprof/profile"
+)
+
+var commentStart = "//:" // Sentinel for comments on options
+var tailDigitsRE = regexp.MustCompile("[0-9]+$")
+
+// interactive starts a shell to read pprof commands.
+func interactive(p *profile.Profile, o *plugin.Options) error {
+ // Enter command processing loop.
+ o.UI.SetAutoComplete(newCompleter(functionNames(p)))
+ configure("compact_labels", "true")
+ configHelp["sample_index"] += fmt.Sprintf("Or use sample_index=name, with name in %v.\n", sampleTypes(p))
+
+ // Do not wait for the visualizer to complete, to allow multiple
+ // graphs to be visualized simultaneously.
+ interactiveMode = true
+ shortcuts := profileShortcuts(p)
+
+ copier := makeProfileCopier(p)
+ greetings(p, o.UI)
+ for {
+ input, err := o.UI.ReadLine("(pprof) ")
+ if err != nil {
+ if err != io.EOF {
+ return err
+ }
+ if input == "" {
+ return nil
+ }
+ }
+
+ for _, input := range shortcuts.expand(input) {
+ // Process assignments of the form variable=value
+ if s := strings.SplitN(input, "=", 2); len(s) > 0 {
+ name := strings.TrimSpace(s[0])
+ var value string
+ if len(s) == 2 {
+ value = s[1]
+ if comment := strings.LastIndex(value, commentStart); comment != -1 {
+ value = value[:comment]
+ }
+ value = strings.TrimSpace(value)
+ }
+ if isConfigurable(name) {
+ // All non-bool options require inputs
+ if len(s) == 1 && !isBoolConfig(name) {
+ o.UI.PrintErr(fmt.Errorf("please specify a value, e.g. %s=<val>", name))
+ continue
+ }
+ if name == "sample_index" {
+ // Error check sample_index=xxx to ensure xxx is a valid sample type.
+ index, err := p.SampleIndexByName(value)
+ if err != nil {
+ o.UI.PrintErr(err)
+ continue
+ }
+ if index < 0 || index >= len(p.SampleType) {
+ o.UI.PrintErr(fmt.Errorf("invalid sample_index %q", value))
+ continue
+ }
+ value = p.SampleType[index].Type
+ }
+ if err := configure(name, value); err != nil {
+ o.UI.PrintErr(err)
+ }
+ continue
+ }
+ }
+
+ tokens := strings.Fields(input)
+ if len(tokens) == 0 {
+ continue
+ }
+
+ switch tokens[0] {
+ case "o", "options":
+ printCurrentOptions(p, o.UI)
+ continue
+ case "exit", "quit", "q":
+ return nil
+ case "help":
+ commandHelp(strings.Join(tokens[1:], " "), o.UI)
+ continue
+ }
+
+ args, cfg, err := parseCommandLine(tokens)
+ if err == nil {
+ err = generateReportWrapper(copier.newCopy(), args, cfg, o)
+ }
+
+ if err != nil {
+ o.UI.PrintErr(err)
+ }
+ }
+ }
+}
+
+var generateReportWrapper = generateReport // For testing purposes.
+
+// greetings prints a brief welcome and some overall profile
+// information before accepting interactive commands.
+func greetings(p *profile.Profile, ui plugin.UI) {
+ numLabelUnits := identifyNumLabelUnits(p, ui)
+ ropt, err := reportOptions(p, numLabelUnits, currentConfig())
+ if err == nil {
+ rpt := report.New(p, ropt)
+ ui.Print(strings.Join(report.ProfileLabels(rpt), "\n"))
+ if rpt.Total() == 0 && len(p.SampleType) > 1 {
+ ui.Print(`No samples were found with the default sample value type.`)
+ ui.Print(`Try "sample_index" command to analyze different sample values.`, "\n")
+ }
+ }
+ ui.Print(`Entering interactive mode (type "help" for commands, "o" for options)`)
+}
+
+// shortcuts represents composite commands that expand into a sequence
+// of other commands.
+type shortcuts map[string][]string
+
+func (a shortcuts) expand(input string) []string {
+ input = strings.TrimSpace(input)
+ if a != nil {
+ if r, ok := a[input]; ok {
+ return r
+ }
+ }
+ return []string{input}
+}
+
+var pprofShortcuts = shortcuts{
+ ":": []string{"focus=", "ignore=", "hide=", "tagfocus=", "tagignore="},
+}
+
+// profileShortcuts creates macros for convenience and backward compatibility.
+func profileShortcuts(p *profile.Profile) shortcuts {
+ s := pprofShortcuts
+ // Add shortcuts for sample types
+ for _, st := range p.SampleType {
+ command := fmt.Sprintf("sample_index=%s", st.Type)
+ s[st.Type] = []string{command}
+ s["total_"+st.Type] = []string{"mean=0", command}
+ s["mean_"+st.Type] = []string{"mean=1", command}
+ }
+ return s
+}
+
+func sampleTypes(p *profile.Profile) []string {
+ types := make([]string, len(p.SampleType))
+ for i, t := range p.SampleType {
+ types[i] = t.Type
+ }
+ return types
+}
+
+func printCurrentOptions(p *profile.Profile, ui plugin.UI) {
+ var args []string
+ current := currentConfig()
+ for _, f := range configFields {
+ n := f.name
+ v := current.get(f)
+ comment := ""
+ switch {
+ case len(f.choices) > 0:
+ values := append([]string{}, f.choices...)
+ sort.Strings(values)
+ comment = "[" + strings.Join(values, " | ") + "]"
+ case n == "sample_index":
+ st := sampleTypes(p)
+ if v == "" {
+ // Apply default (last sample index).
+ v = st[len(st)-1]
+ }
+ // Add comments for all sample types in profile.
+ comment = "[" + strings.Join(st, " | ") + "]"
+ case n == "source_path":
+ continue
+ case n == "nodecount" && v == "-1":
+ comment = "default"
+ case v == "":
+ // Add quotes for empty values.
+ v = `""`
+ }
+ if comment != "" {
+ comment = commentStart + " " + comment
+ }
+ args = append(args, fmt.Sprintf(" %-25s = %-20s %s", n, v, comment))
+ }
+ sort.Strings(args)
+ ui.Print(strings.Join(args, "\n"))
+}
+
+// parseCommandLine parses a command and returns the pprof command to
+// execute and the configuration to use for the report.
+func parseCommandLine(input []string) ([]string, config, error) {
+ cmd, args := input[:1], input[1:]
+ name := cmd[0]
+
+ c := pprofCommands[name]
+ if c == nil {
+ // Attempt splitting digits on abbreviated commands (eg top10)
+ if d := tailDigitsRE.FindString(name); d != "" && d != name {
+ name = name[:len(name)-len(d)]
+ cmd[0], args = name, append([]string{d}, args...)
+ c = pprofCommands[name]
+ }
+ }
+ if c == nil {
+ if _, ok := configHelp[name]; ok {
+ value := "<val>"
+ if len(args) > 0 {
+ value = args[0]
+ }
+ return nil, config{}, fmt.Errorf("did you mean: %s=%s", name, value)
+ }
+ return nil, config{}, fmt.Errorf("unrecognized command: %q", name)
+ }
+
+ if c.hasParam {
+ if len(args) == 0 {
+ return nil, config{}, fmt.Errorf("command %s requires an argument", name)
+ }
+ cmd = append(cmd, args[0])
+ args = args[1:]
+ }
+
+ // Copy config since options set in the command line should not persist.
+ vcopy := currentConfig()
+
+ var focus, ignore string
+ for i := 0; i < len(args); i++ {
+ t := args[i]
+ if n, err := strconv.ParseInt(t, 10, 32); err == nil {
+ vcopy.NodeCount = int(n)
+ continue
+ }
+ switch t[0] {
+ case '>':
+ outputFile := t[1:]
+ if outputFile == "" {
+ i++
+ if i >= len(args) {
+ return nil, config{}, fmt.Errorf("unexpected end of line after >")
+ }
+ outputFile = args[i]
+ }
+ vcopy.Output = outputFile
+ case '-':
+ if t == "--cum" || t == "-cum" {
+ vcopy.Sort = "cum"
+ continue
+ }
+ ignore = catRegex(ignore, t[1:])
+ default:
+ focus = catRegex(focus, t)
+ }
+ }
+
+ if name == "tags" {
+ if focus != "" {
+ vcopy.TagFocus = focus
+ }
+ if ignore != "" {
+ vcopy.TagIgnore = ignore
+ }
+ } else {
+ if focus != "" {
+ vcopy.Focus = focus
+ }
+ if ignore != "" {
+ vcopy.Ignore = ignore
+ }
+ }
+ if vcopy.NodeCount == -1 && (name == "text" || name == "top") {
+ vcopy.NodeCount = 10
+ }
+
+ return cmd, vcopy, nil
+}
+
+func catRegex(a, b string) string {
+ if a != "" && b != "" {
+ return a + "|" + b
+ }
+ return a + b
+}
+
+// commandHelp displays help and usage information for all Commands
+// and Variables or a specific Command or Variable.
+func commandHelp(args string, ui plugin.UI) {
+ if args == "" {
+ help := usage(false)
+ help = help + `
+ : Clear focus/ignore/hide/tagfocus/tagignore
+
+ type "help <cmd|option>" for more information
+`
+
+ ui.Print(help)
+ return
+ }
+
+ if c := pprofCommands[args]; c != nil {
+ ui.Print(c.help(args))
+ return
+ }
+
+ if help, ok := configHelp[args]; ok {
+ ui.Print(help + "\n")
+ return
+ }
+
+ ui.PrintErr("Unknown command: " + args)
+}
+
+// newCompleter creates an autocompletion function for a set of commands.
+func newCompleter(fns []string) func(string) string {
+ return func(line string) string {
+ switch tokens := strings.Fields(line); len(tokens) {
+ case 0:
+ // Nothing to complete
+ case 1:
+ // Single token -- complete command name
+ if match := matchVariableOrCommand(tokens[0]); match != "" {
+ return match
+ }
+ case 2:
+ if tokens[0] == "help" {
+ if match := matchVariableOrCommand(tokens[1]); match != "" {
+ return tokens[0] + " " + match
+ }
+ return line
+ }
+ fallthrough
+ default:
+ // Multiple tokens -- complete using functions, except for tags
+ if cmd := pprofCommands[tokens[0]]; cmd != nil && tokens[0] != "tags" {
+ lastTokenIdx := len(tokens) - 1
+ lastToken := tokens[lastTokenIdx]
+ if strings.HasPrefix(lastToken, "-") {
+ lastToken = "-" + functionCompleter(lastToken[1:], fns)
+ } else {
+ lastToken = functionCompleter(lastToken, fns)
+ }
+ return strings.Join(append(tokens[:lastTokenIdx], lastToken), " ")
+ }
+ }
+ return line
+ }
+}
+
+// matchVariableOrCommand attempts to match a string token to the prefix of a Command.
+func matchVariableOrCommand(token string) string {
+ token = strings.ToLower(token)
+ var matches []string
+ for cmd := range pprofCommands {
+ if strings.HasPrefix(cmd, token) {
+ matches = append(matches, cmd)
+ }
+ }
+ matches = append(matches, completeConfig(token)...)
+ if len(matches) == 1 {
+ return matches[0]
+ }
+ return ""
+}
+
+// functionCompleter replaces provided substring with a function
+// name retrieved from a profile if a single match exists. Otherwise,
+// it returns unchanged substring. It defaults to no-op if the profile
+// is not specified.
+func functionCompleter(substring string, fns []string) string {
+ found := ""
+ for _, fName := range fns {
+ if strings.Contains(fName, substring) {
+ if found != "" {
+ return substring
+ }
+ found = fName
+ }
+ }
+ if found != "" {
+ return found
+ }
+ return substring
+}
+
+func functionNames(p *profile.Profile) []string {
+ var fns []string
+ for _, fn := range p.Function {
+ fns = append(fns, fn.Name)
+ }
+ return fns
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/driver/options.go b/src/cmd/vendor/github.com/google/pprof/internal/driver/options.go
new file mode 100644
index 0000000..6e8f9fc
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/driver/options.go
@@ -0,0 +1,100 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package driver
+
+import (
+ "bufio"
+ "fmt"
+ "io"
+ "os"
+ "strings"
+
+ "github.com/google/pprof/internal/binutils"
+ "github.com/google/pprof/internal/plugin"
+ "github.com/google/pprof/internal/symbolizer"
+ "github.com/google/pprof/internal/transport"
+)
+
+// setDefaults returns a new plugin.Options with zero fields set to
+// sensible defaults.
+func setDefaults(o *plugin.Options) *plugin.Options {
+ d := &plugin.Options{}
+ if o != nil {
+ *d = *o
+ }
+ if d.Writer == nil {
+ d.Writer = oswriter{}
+ }
+ if d.Flagset == nil {
+ d.Flagset = &GoFlags{}
+ }
+ if d.Obj == nil {
+ d.Obj = &binutils.Binutils{}
+ }
+ if d.UI == nil {
+ d.UI = &stdUI{r: bufio.NewReader(os.Stdin)}
+ }
+ if d.HTTPTransport == nil {
+ d.HTTPTransport = transport.New(d.Flagset)
+ }
+ if d.Sym == nil {
+ d.Sym = &symbolizer.Symbolizer{Obj: d.Obj, UI: d.UI, Transport: d.HTTPTransport}
+ }
+ return d
+}
+
+type stdUI struct {
+ r *bufio.Reader
+}
+
+func (ui *stdUI) ReadLine(prompt string) (string, error) {
+ os.Stdout.WriteString(prompt)
+ return ui.r.ReadString('\n')
+}
+
+func (ui *stdUI) Print(args ...interface{}) {
+ ui.fprint(os.Stderr, args)
+}
+
+func (ui *stdUI) PrintErr(args ...interface{}) {
+ ui.fprint(os.Stderr, args)
+}
+
+func (ui *stdUI) IsTerminal() bool {
+ return false
+}
+
+func (ui *stdUI) WantBrowser() bool {
+ return true
+}
+
+func (ui *stdUI) SetAutoComplete(func(string) string) {
+}
+
+func (ui *stdUI) fprint(f *os.File, args []interface{}) {
+ text := fmt.Sprint(args...)
+ if !strings.HasSuffix(text, "\n") {
+ text += "\n"
+ }
+ f.WriteString(text)
+}
+
+// oswriter implements the Writer interface using a regular file.
+type oswriter struct{}
+
+func (oswriter) Open(name string) (io.WriteCloser, error) {
+ f, err := os.Create(name)
+ return f, err
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/driver/settings.go b/src/cmd/vendor/github.com/google/pprof/internal/driver/settings.go
new file mode 100644
index 0000000..b784618
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/driver/settings.go
@@ -0,0 +1,158 @@
+package driver
+
+import (
+ "encoding/json"
+ "fmt"
+ "net/url"
+ "os"
+ "path/filepath"
+)
+
+// settings holds pprof settings.
+type settings struct {
+ // Configs holds a list of named UI configurations.
+ Configs []namedConfig `json:"configs"`
+}
+
+// namedConfig associates a name with a config.
+type namedConfig struct {
+ Name string `json:"name"`
+ config
+}
+
+// settingsFileName returns the name of the file where settings should be saved.
+func settingsFileName() (string, error) {
+ // Return "pprof/settings.json" under os.UserConfigDir().
+ dir, err := os.UserConfigDir()
+ if err != nil {
+ return "", err
+ }
+ return filepath.Join(dir, "pprof", "settings.json"), nil
+}
+
+// readSettings reads settings from fname.
+func readSettings(fname string) (*settings, error) {
+ data, err := os.ReadFile(fname)
+ if err != nil {
+ if os.IsNotExist(err) {
+ return &settings{}, nil
+ }
+ return nil, fmt.Errorf("could not read settings: %w", err)
+ }
+ settings := &settings{}
+ if err := json.Unmarshal(data, settings); err != nil {
+ return nil, fmt.Errorf("could not parse settings: %w", err)
+ }
+ for i := range settings.Configs {
+ settings.Configs[i].resetTransient()
+ }
+ return settings, nil
+}
+
+// writeSettings saves settings to fname.
+func writeSettings(fname string, settings *settings) error {
+ data, err := json.MarshalIndent(settings, "", " ")
+ if err != nil {
+ return fmt.Errorf("could not encode settings: %w", err)
+ }
+
+ // create the settings directory if it does not exist
+ // XDG specifies permissions 0700 when creating settings dirs:
+ // https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html
+ if err := os.MkdirAll(filepath.Dir(fname), 0700); err != nil {
+ return fmt.Errorf("failed to create settings directory: %w", err)
+ }
+
+ if err := os.WriteFile(fname, data, 0644); err != nil {
+ return fmt.Errorf("failed to write settings: %w", err)
+ }
+ return nil
+}
+
+// configMenuEntry holds information for a single config menu entry.
+type configMenuEntry struct {
+ Name string
+ URL string
+ Current bool // Is this the currently selected config?
+ UserConfig bool // Is this a user-provided config?
+}
+
+// configMenu returns a list of items to add to a menu in the web UI.
+func configMenu(fname string, u url.URL) []configMenuEntry {
+ // Start with system configs.
+ configs := []namedConfig{{Name: "Default", config: defaultConfig()}}
+ if settings, err := readSettings(fname); err == nil {
+ // Add user configs.
+ configs = append(configs, settings.Configs...)
+ }
+
+ // Convert to menu entries.
+ result := make([]configMenuEntry, len(configs))
+ lastMatch := -1
+ for i, cfg := range configs {
+ dst, changed := cfg.config.makeURL(u)
+ if !changed {
+ lastMatch = i
+ }
+ // Use a relative URL to work in presence of stripping/redirects in webui.go.
+ rel := &url.URL{RawQuery: dst.RawQuery, ForceQuery: true}
+ result[i] = configMenuEntry{
+ Name: cfg.Name,
+ URL: rel.String(),
+ UserConfig: (i != 0),
+ }
+ }
+	// Mark the last matching config as current
+ if lastMatch >= 0 {
+ result[lastMatch].Current = true
+ }
+ return result
+}
+
+// editSettings edits settings by applying fn to them.
+func editSettings(fname string, fn func(s *settings) error) error {
+ settings, err := readSettings(fname)
+ if err != nil {
+ return err
+ }
+ if err := fn(settings); err != nil {
+ return err
+ }
+ return writeSettings(fname, settings)
+}
+
+// setConfig saves the config specified in request to fname.
+func setConfig(fname string, request url.URL) error {
+ q := request.Query()
+ name := q.Get("config")
+ if name == "" {
+ return fmt.Errorf("invalid config name")
+ }
+ cfg := currentConfig()
+ if err := cfg.applyURL(q); err != nil {
+ return err
+ }
+ return editSettings(fname, func(s *settings) error {
+ for i, c := range s.Configs {
+ if c.Name == name {
+ s.Configs[i].config = cfg
+ return nil
+ }
+ }
+ s.Configs = append(s.Configs, namedConfig{Name: name, config: cfg})
+ return nil
+ })
+}
+
+// removeConfig removes config from fname.
+func removeConfig(fname, config string) error {
+ return editSettings(fname, func(s *settings) error {
+ for i, c := range s.Configs {
+ if c.Name == config {
+ s.Configs = append(s.Configs[:i], s.Configs[i+1:]...)
+ return nil
+ }
+ }
+ return fmt.Errorf("config %s not found", config)
+ })
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/driver/stacks.go b/src/cmd/vendor/github.com/google/pprof/internal/driver/stacks.go
new file mode 100644
index 0000000..249dfe0
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/driver/stacks.go
@@ -0,0 +1,58 @@
+// Copyright 2022 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package driver
+
+import (
+ "encoding/json"
+ "html/template"
+ "net/http"
+
+ "github.com/google/pprof/internal/report"
+)
+
+// stackView generates the new flamegraph view.
+func (ui *webInterface) stackView(w http.ResponseWriter, req *http.Request) {
+ // Get all data in a report.
+ rpt, errList := ui.makeReport(w, req, []string{"svg"}, func(cfg *config) {
+ cfg.CallTree = true
+ cfg.Trim = false
+ cfg.Granularity = "filefunctions"
+ })
+ if rpt == nil {
+ return // error already reported
+ }
+
+ // Make stack data and generate corresponding JSON.
+ stacks := rpt.Stacks()
+ b, err := json.Marshal(stacks)
+ if err != nil {
+ http.Error(w, "error serializing stacks for flame graph",
+ http.StatusInternalServerError)
+ ui.options.UI.PrintErr(err)
+ return
+ }
+
+ nodes := make([]string, len(stacks.Sources))
+ for i, src := range stacks.Sources {
+ nodes[i] = src.FullName
+ }
+ nodes[0] = "" // root is not a real node
+
+ _, legend := report.TextItems(rpt)
+ ui.render(w, req, "stacks", rpt, errList, legend, webArgs{
+ Stacks: template.JS(b),
+ Nodes: nodes,
+ })
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/driver/svg.go b/src/cmd/vendor/github.com/google/pprof/internal/driver/svg.go
new file mode 100644
index 0000000..62767e7
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/driver/svg.go
@@ -0,0 +1,80 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package driver
+
+import (
+ "regexp"
+ "strings"
+
+ "github.com/google/pprof/third_party/svgpan"
+)
+
+var (
+ viewBox = regexp.MustCompile(`<svg\s*width="[^"]+"\s*height="[^"]+"\s*viewBox="[^"]+"`)
+ graphID = regexp.MustCompile(`<g id="graph\d"`)
+ svgClose = regexp.MustCompile(`</svg>`)
+)
+
+// massageSVG enhances the SVG output from DOT to provide better
+// panning inside a web browser. It uses the svgpan library, which is
+// embedded into the svgpan.JSSource variable.
+func massageSVG(svg string) string {
+ // Work around for dot bug which misses quoting some ampersands,
+ // resulting on unparsable SVG.
+ svg = strings.Replace(svg, "&;", "&amp;;", -1)
+
+ // Dot's SVG output is
+ //
+ // <svg width="___" height="___"
+ // viewBox="___" xmlns=...>
+ // <g id="graph0" transform="...">
+ // ...
+ // </g>
+ // </svg>
+ //
+ // Change it to
+ //
+ // <svg width="100%" height="100%"
+ // xmlns=...>
+
+ // <script type="text/ecmascript"><![CDATA[` ..$(svgpan.JSSource)... `]]></script>`
+ // <g id="viewport" transform="translate(0,0)">
+ // <g id="graph0" transform="...">
+ // ...
+ // </g>
+ // </g>
+ // </svg>
+
+ if loc := viewBox.FindStringIndex(svg); loc != nil {
+ svg = svg[:loc[0]] +
+ `<svg width="100%" height="100%"` +
+ svg[loc[1]:]
+ }
+
+ if loc := graphID.FindStringIndex(svg); loc != nil {
+ svg = svg[:loc[0]] +
+ `<script type="text/ecmascript"><![CDATA[` + string(svgpan.JSSource) + `]]></script>` +
+ `<g id="viewport" transform="scale(0.5,0.5) translate(0,0)">` +
+ svg[loc[0]:]
+ }
+
+ if loc := svgClose.FindStringIndex(svg); loc != nil {
+ svg = svg[:loc[0]] +
+ `</g>` +
+ svg[loc[0]:]
+ }
+
+ return svg
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/driver/tagroot.go b/src/cmd/vendor/github.com/google/pprof/internal/driver/tagroot.go
new file mode 100644
index 0000000..76a594d
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/driver/tagroot.go
@@ -0,0 +1,133 @@
+package driver
+
+import (
+ "strings"
+
+ "github.com/google/pprof/internal/measurement"
+ "github.com/google/pprof/profile"
+)
+
+// addLabelNodes adds pseudo stack frames "label:value" to each Sample with
+// labels matching the supplied keys.
+//
+// rootKeys adds frames at the root of the callgraph (first key becomes new root).
+// leafKeys adds frames at the leaf of the callgraph (last key becomes new leaf).
+//
+// Returns whether there were matches found for the label keys.
+func addLabelNodes(p *profile.Profile, rootKeys, leafKeys []string, outputUnit string) (rootm, leafm bool) {
+ // Find where to insert the new locations and functions at the end of
+ // their ID spaces.
+ var maxLocID uint64
+ var maxFunctionID uint64
+ for _, loc := range p.Location {
+ if loc.ID > maxLocID {
+ maxLocID = loc.ID
+ }
+ }
+ for _, f := range p.Function {
+ if f.ID > maxFunctionID {
+ maxFunctionID = f.ID
+ }
+ }
+ nextLocID := maxLocID + 1
+ nextFuncID := maxFunctionID + 1
+
+ // Intern the new locations and functions we are generating.
+ type locKey struct {
+ functionName, fileName string
+ }
+ locs := map[locKey]*profile.Location{}
+
+ internLoc := func(locKey locKey) *profile.Location {
+ loc, found := locs[locKey]
+ if found {
+ return loc
+ }
+
+ function := &profile.Function{
+ ID: nextFuncID,
+ Name: locKey.functionName,
+ Filename: locKey.fileName,
+ }
+ nextFuncID++
+ p.Function = append(p.Function, function)
+
+ loc = &profile.Location{
+ ID: nextLocID,
+ Line: []profile.Line{
+ {
+ Function: function,
+ },
+ },
+ }
+ nextLocID++
+ p.Location = append(p.Location, loc)
+ locs[locKey] = loc
+ return loc
+ }
+
+ makeLabelLocs := func(s *profile.Sample, keys []string) ([]*profile.Location, bool) {
+ var locs []*profile.Location
+ var match bool
+ for i := range keys {
+ // Loop backwards, ensuring the first tag is closest to the root,
+ // and the last tag is closest to the leaves.
+ k := keys[len(keys)-1-i]
+ values := formatLabelValues(s, k, outputUnit)
+ if len(values) > 0 {
+ match = true
+ }
+ locKey := locKey{
+ functionName: strings.Join(values, ","),
+ fileName: k,
+ }
+ loc := internLoc(locKey)
+ locs = append(locs, loc)
+ }
+ return locs, match
+ }
+
+ for _, s := range p.Sample {
+ rootsToAdd, sampleMatchedRoot := makeLabelLocs(s, rootKeys)
+ if sampleMatchedRoot {
+ rootm = true
+ }
+ leavesToAdd, sampleMatchedLeaf := makeLabelLocs(s, leafKeys)
+ if sampleMatchedLeaf {
+ leafm = true
+ }
+
+ if len(leavesToAdd)+len(rootsToAdd) == 0 {
+ continue
+ }
+
+ var newLocs []*profile.Location
+ newLocs = append(newLocs, leavesToAdd...)
+ newLocs = append(newLocs, s.Location...)
+ newLocs = append(newLocs, rootsToAdd...)
+ s.Location = newLocs
+ }
+ return
+}
+
+// formatLabelValues returns all the string and numeric labels in Sample, with
+// the numeric labels formatted according to outputUnit.
+func formatLabelValues(s *profile.Sample, k string, outputUnit string) []string {
+ var values []string
+ values = append(values, s.Label[k]...)
+ numLabels := s.NumLabel[k]
+ numUnits := s.NumUnit[k]
+ if len(numLabels) != len(numUnits) && len(numUnits) != 0 {
+ return values
+ }
+ for i, numLabel := range numLabels {
+ var value string
+ if len(numUnits) != 0 {
+ value = measurement.ScaledLabel(numLabel, numUnits[i], outputUnit)
+ } else {
+ value = measurement.ScaledLabel(numLabel, "", "")
+ }
+ values = append(values, value)
+ }
+ return values
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/driver/tempfile.go b/src/cmd/vendor/github.com/google/pprof/internal/driver/tempfile.go
new file mode 100644
index 0000000..b6c8776
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/driver/tempfile.go
@@ -0,0 +1,60 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package driver
+
+import (
+ "fmt"
+ "os"
+ "path/filepath"
+ "sync"
+)
+
+// newTempFile returns a new output file in dir with the provided prefix and suffix.
+func newTempFile(dir, prefix, suffix string) (*os.File, error) {
+ for index := 1; index < 10000; index++ {
+ switch f, err := os.OpenFile(filepath.Join(dir, fmt.Sprintf("%s%03d%s", prefix, index, suffix)), os.O_RDWR|os.O_CREATE|os.O_EXCL, 0666); {
+ case err == nil:
+ return f, nil
+ case !os.IsExist(err):
+ return nil, err
+ }
+ }
+ // Give up
+ return nil, fmt.Errorf("could not create file of the form %s%03d%s", prefix, 1, suffix)
+}
+
+var tempFiles []string
+var tempFilesMu = sync.Mutex{}
+
+// deferDeleteTempFile marks a file to be deleted by next call to Cleanup()
+func deferDeleteTempFile(path string) {
+ tempFilesMu.Lock()
+ tempFiles = append(tempFiles, path)
+ tempFilesMu.Unlock()
+}
+
+// cleanupTempFiles removes any temporary files selected for deferred cleaning.
+func cleanupTempFiles() error {
+ tempFilesMu.Lock()
+ defer tempFilesMu.Unlock()
+ var lastErr error
+ for _, f := range tempFiles {
+ if err := os.Remove(f); err != nil {
+ lastErr = err
+ }
+ }
+ tempFiles = nil
+ return lastErr
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/driver/webhtml.go b/src/cmd/vendor/github.com/google/pprof/internal/driver/webhtml.go
new file mode 100644
index 0000000..55973ff
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/driver/webhtml.go
@@ -0,0 +1,71 @@
+// Copyright 2017 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package driver
+
+import (
+ "embed"
+ "fmt"
+ "html/template"
+ "os"
+
+ "github.com/google/pprof/third_party/d3flamegraph"
+)
+
+//go:embed html
+var embeddedFiles embed.FS
+
+// addTemplates adds a set of template definitions to templates.
+func addTemplates(templates *template.Template) {
+	// Load specified file. A missing embedded file is a build/packaging
+	// error, so report it and exit instead of returning an error.
+	loadFile := func(fname string) string {
+		data, err := embeddedFiles.ReadFile(fname)
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "internal/driver: embedded file %q not found\n",
+				fname)
+			os.Exit(1)
+		}
+		return string(data)
+	}
+	// loadCSS/loadJS wrap a file's raw contents in inline <style>/<script> tags.
+	loadCSS := func(fname string) string {
+		return `<style type="text/css">` + "\n" + loadFile(fname) + `</style>` + "\n"
+	}
+	loadJS := func(fname string) string {
+		return `<script>` + "\n" + loadFile(fname) + `</script>` + "\n"
+	}
+
+	// Define a named template with specified contents.
+	def := func(name, contents string) {
+		sub := template.New(name)
+		template.Must(sub.Parse(contents))
+		template.Must(templates.AddParseTree(name, sub.Tree))
+	}
+
+	// Pre-packaged third-party files.
+	def("d3flamegraphscript", d3flamegraph.JSSource)
+	def("d3flamegraphcss", d3flamegraph.CSSSource)
+
+	// Embedded files.
+	def("css", loadCSS("html/common.css"))
+	def("header", loadFile("html/header.html"))
+	def("graph", loadFile("html/graph.html"))
+	def("script", loadJS("html/common.js"))
+	def("top", loadFile("html/top.html"))
+	def("sourcelisting", loadFile("html/source.html"))
+	def("plaintext", loadFile("html/plaintext.html"))
+	def("flamegraph", loadFile("html/flamegraph.html"))
+	def("stacks", loadFile("html/stacks.html"))
+	def("stacks_css", loadCSS("html/stacks.css"))
+	def("stacks_js", loadJS("html/stacks.js"))
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/driver/webui.go b/src/cmd/vendor/github.com/google/pprof/internal/driver/webui.go
new file mode 100644
index 0000000..41b3002
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/driver/webui.go
@@ -0,0 +1,473 @@
+// Copyright 2017 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package driver
+
+import (
+ "bytes"
+ "fmt"
+ "html/template"
+ "net"
+ "net/http"
+ gourl "net/url"
+ "os"
+ "os/exec"
+ "strconv"
+ "strings"
+ "time"
+
+ "github.com/google/pprof/internal/graph"
+ "github.com/google/pprof/internal/plugin"
+ "github.com/google/pprof/internal/report"
+ "github.com/google/pprof/profile"
+)
+
+// webInterface holds the state needed for serving a browser-based interface.
+type webInterface struct {
+	prof         *profile.Profile // profile being served
+	copier       profileCopier    // makes per-request copies of prof
+	options      *plugin.Options
+	help         map[string]string // help text keyed by command/config name
+	templates    *template.Template
+	settingsFile string // path where saved configurations live
+}
+
+// makeWebInterface constructs a webInterface for profile p, loading the
+// HTML/source templates and resolving the settings file location.
+func makeWebInterface(p *profile.Profile, copier profileCopier, opt *plugin.Options) (*webInterface, error) {
+	settingsFile, err := settingsFileName()
+	if err != nil {
+		return nil, err
+	}
+	templates := template.New("templategroup")
+	addTemplates(templates)
+	report.AddSourceTemplates(templates)
+	return &webInterface{
+		prof:         p,
+		copier:       copier,
+		options:      opt,
+		help:         make(map[string]string),
+		templates:    templates,
+		settingsFile: settingsFile,
+	}, nil
+}
+
+// maxEntries is the maximum number of entries to print for text interfaces.
+const maxEntries = 50
+
+// errorCatcher is a UI that captures errors for reporting to the browser.
+type errorCatcher struct {
+	plugin.UI
+	errors []string // messages captured by PrintErr, in call order
+}
+
+// PrintErr records the message (without trailing newline) and forwards it to
+// the wrapped UI.
+func (ec *errorCatcher) PrintErr(args ...interface{}) {
+	ec.errors = append(ec.errors, strings.TrimSuffix(fmt.Sprintln(args...), "\n"))
+	ec.UI.PrintErr(args...)
+}
+
+// webArgs contains arguments passed to templates in webhtml.go.
+// Only the fields relevant to a given view are populated by its handler.
+type webArgs struct {
+	Title       string
+	Errors      []string
+	Total       int64
+	SampleTypes []string
+	Legend      []string
+	Help        map[string]string
+	Nodes       []string
+	HTMLBody    template.HTML
+	TextBody    string
+	Top         []report.TextItem
+	FlameGraph  template.JS
+	Stacks      template.JS
+	Configs     []configMenuEntry
+}
+
+// serveWebInterface starts an HTTP server for the web UI on hostport and,
+// unless disableBrowser is set, opens a browser pointed at it. It blocks
+// until the server returns.
+func serveWebInterface(hostport string, p *profile.Profile, o *plugin.Options, disableBrowser bool) error {
+	host, port, err := getHostAndPort(hostport)
+	if err != nil {
+		return err
+	}
+	interactiveMode = true
+	copier := makeProfileCopier(p)
+	ui, err := makeWebInterface(p, copier, o)
+	if err != nil {
+		return err
+	}
+	// Populate help from command/config descriptions plus UI-only entries.
+	for n, c := range pprofCommands {
+		ui.help[n] = c.description
+	}
+	for n, help := range configHelp {
+		ui.help[n] = help
+	}
+	ui.help["details"] = "Show information about the profile and this view"
+	ui.help["graph"] = "Display profile as a directed graph"
+	ui.help["flamegraph"] = "Display profile as a flame graph"
+	ui.help["flamegraphold"] = "Display profile as a flame graph (old version; slated for removal)"
+	ui.help["reset"] = "Show the entire profile"
+	ui.help["save_config"] = "Save current settings"
+
+	server := o.HTTPServer
+	if server == nil {
+		server = defaultWebServer
+	}
+	args := &plugin.HTTPServerArgs{
+		Hostport: net.JoinHostPort(host, strconv.Itoa(port)),
+		Host:     host,
+		Port:     port,
+		Handlers: map[string]http.Handler{
+			"/":              http.HandlerFunc(ui.dot),
+			"/top":           http.HandlerFunc(ui.top),
+			"/disasm":        http.HandlerFunc(ui.disasm),
+			"/source":        http.HandlerFunc(ui.source),
+			"/peek":          http.HandlerFunc(ui.peek),
+			"/flamegraphold": http.HandlerFunc(ui.flamegraph),
+			"/flamegraph":    http.HandlerFunc(ui.stackView),
+			"/flamegraph2":   http.HandlerFunc(ui.stackView), // Support older URL
+			"/saveconfig":    http.HandlerFunc(ui.saveConfig),
+			"/deleteconfig":  http.HandlerFunc(ui.deleteConfig),
+			"/download": http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
+				w.Header().Set("Content-Type", "application/vnd.google.protobuf+gzip")
+				w.Header().Set("Content-Disposition", "attachment;filename=profile.pb.gz")
+				// NOTE(review): Write error is ignored; a failure mid-stream
+				// leaves the client with a truncated download.
+				p.Write(w)
+			}),
+		},
+	}
+
+	url := "http://" + args.Hostport
+
+	o.UI.Print("Serving web UI on ", url)
+
+	if o.UI.WantBrowser() && !disableBrowser {
+		go openBrowser(url, o)
+	}
+	return server(args)
+}
+
+// getHostAndPort splits hostport into host and numeric port. An empty host
+// defaults to "localhost"; an empty port is resolved by briefly listening on
+// port 0 so the kernel picks a free one.
+func getHostAndPort(hostport string) (string, int, error) {
+	host, portStr, err := net.SplitHostPort(hostport)
+	if err != nil {
+		return "", 0, fmt.Errorf("could not split http address: %v", err)
+	}
+	if host == "" {
+		host = "localhost"
+	}
+	var port int
+	if portStr == "" {
+		ln, err := net.Listen("tcp", net.JoinHostPort(host, "0"))
+		if err != nil {
+			return "", 0, fmt.Errorf("could not generate random port: %v", err)
+		}
+		port = ln.Addr().(*net.TCPAddr).Port
+		err = ln.Close()
+		if err != nil {
+			return "", 0, fmt.Errorf("could not generate random port: %v", err)
+		}
+	} else {
+		port, err = strconv.Atoi(portStr)
+		if err != nil {
+			return "", 0, fmt.Errorf("invalid port number: %v", err)
+		}
+	}
+	return host, port, nil
+}
+// defaultWebServer is the built-in HTTP server used when the caller does not
+// supply one. When serving on a localhost address it rejects connections from
+// non-local clients.
+func defaultWebServer(args *plugin.HTTPServerArgs) error {
+	ln, err := net.Listen("tcp", args.Hostport)
+	if err != nil {
+		return err
+	}
+	isLocal := isLocalhost(args.Host)
+	handler := http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
+		if isLocal {
+			// Only allow local clients
+			host, _, err := net.SplitHostPort(req.RemoteAddr)
+			if err != nil || !isLocalhost(host) {
+				http.Error(w, "permission denied", http.StatusForbidden)
+				return
+			}
+		}
+		h := args.Handlers[req.URL.Path]
+		if h == nil {
+			// Fall back to default behavior
+			h = http.DefaultServeMux
+		}
+		h.ServeHTTP(w, req)
+	})
+
+	// We serve the ui at /ui/ and redirect there from the root. This is done
+	// to surface any problems with serving the ui at a non-root early. See:
+	//
+	//	https://github.com/google/pprof/pull/348
+	mux := http.NewServeMux()
+	mux.Handle("/ui/", http.StripPrefix("/ui", handler))
+	mux.Handle("/", redirectWithQuery("/ui"))
+	s := &http.Server{Handler: mux}
+	return s.Serve(ln)
+}
+
+// redirectWithQuery returns a handler that redirects to path while
+// preserving the original request's query string.
+func redirectWithQuery(path string) http.HandlerFunc {
+	return func(w http.ResponseWriter, r *http.Request) {
+		pathWithQuery := &gourl.URL{Path: path, RawQuery: r.URL.RawQuery}
+		http.Redirect(w, r, pathWithQuery.String(), http.StatusTemporaryRedirect)
+	}
+}
+
+// isLocalhost reports whether host is a loopback name/address
+// (IPv4 or IPv6, with or without brackets).
+func isLocalhost(host string) bool {
+	for _, v := range []string{"localhost", "127.0.0.1", "[::1]", "::1"} {
+		if host == v {
+			return true
+		}
+	}
+	return false
+}
+
+func openBrowser(url string, o *plugin.Options) {
+ // Construct URL.
+ baseURL, _ := gourl.Parse(url)
+ current := currentConfig()
+ u, _ := current.makeURL(*baseURL)
+
+ // Give server a little time to get ready.
+ time.Sleep(time.Millisecond * 500)
+
+ for _, b := range browsers() {
+ args := strings.Split(b, " ")
+ if len(args) == 0 {
+ continue
+ }
+ viewer := exec.Command(args[0], append(args[1:], u.String())...)
+ viewer.Stderr = os.Stderr
+ if err := viewer.Start(); err == nil {
+ return
+ }
+ }
+ // No visualizer succeeded, so just print URL.
+ o.UI.PrintErr(u.String())
+}
+
+// makeReport generates a report for the specified command.
+// If configEditor is not nil, it is used to edit the config used for the report.
+// On failure it writes the HTTP error response itself and returns (nil, nil).
+// The second result holds any errors the UI printed while generating.
+func (ui *webInterface) makeReport(w http.ResponseWriter, req *http.Request,
+	cmd []string, configEditor func(*config)) (*report.Report, []string) {
+	cfg := currentConfig()
+	if err := cfg.applyURL(req.URL.Query()); err != nil {
+		http.Error(w, err.Error(), http.StatusBadRequest)
+		ui.options.UI.PrintErr(err)
+		return nil, nil
+	}
+	if configEditor != nil {
+		configEditor(&cfg)
+	}
+	// Capture UI errors so they can be surfaced in the rendered page.
+	catcher := &errorCatcher{UI: ui.options.UI}
+	options := *ui.options
+	options.UI = catcher
+	_, rpt, err := generateRawReport(ui.copier.newCopy(), cmd, cfg, &options)
+	if err != nil {
+		http.Error(w, err.Error(), http.StatusBadRequest)
+		ui.options.UI.PrintErr(err)
+		return nil, nil
+	}
+	return rpt, catcher.errors
+}
+
+// render generates html using the named template based on the contents of data.
+// Common fields (title, errors, totals, legend, help, configs) are filled in
+// before execution; the caller supplies any view-specific fields in data.
+func (ui *webInterface) render(w http.ResponseWriter, req *http.Request, tmpl string,
+	rpt *report.Report, errList, legend []string, data webArgs) {
+	file := getFromLegend(legend, "File: ", "unknown")
+	profile := getFromLegend(legend, "Type: ", "unknown")
+	data.Title = file + " " + profile
+	data.Errors = errList
+	data.Total = rpt.Total()
+	data.SampleTypes = sampleTypes(ui.prof)
+	data.Legend = legend
+	data.Help = ui.help
+	data.Configs = configMenu(ui.settingsFile, *req.URL)
+
+	// Render to a buffer first so a template failure can still produce a
+	// clean HTTP error instead of a half-written page.
+	html := &bytes.Buffer{}
+	if err := ui.templates.ExecuteTemplate(html, tmpl, data); err != nil {
+		http.Error(w, "internal template error", http.StatusInternalServerError)
+		ui.options.UI.PrintErr(err)
+		return
+	}
+	w.Header().Set("Content-Type", "text/html")
+	w.Write(html.Bytes())
+}
+
+// dot generates a web page containing an svg diagram.
+func (ui *webInterface) dot(w http.ResponseWriter, req *http.Request) {
+	rpt, errList := ui.makeReport(w, req, []string{"svg"}, nil)
+	if rpt == nil {
+		return // error already reported
+	}
+
+	// Generate dot graph.
+	g, config := report.GetDOT(rpt)
+	legend := config.Labels
+	config.Labels = nil
+	dot := &bytes.Buffer{}
+	graph.ComposeDot(dot, g, &graph.DotAttributes{}, config)
+
+	// Convert to svg.
+	svg, err := dotToSvg(dot.Bytes())
+	if err != nil {
+		http.Error(w, "Could not execute dot; may need to install graphviz.",
+			http.StatusNotImplemented)
+		ui.options.UI.PrintErr("Failed to execute dot. Is Graphviz installed?\n", err)
+		return
+	}
+
+	// Get all node names into an array.
+	nodes := []string{""} // dot starts with node numbered 1, so index 0 is a placeholder
+	for _, n := range g.Nodes {
+		nodes = append(nodes, n.Info.Name)
+	}
+
+	ui.render(w, req, "graph", rpt, errList, legend, webArgs{
+		HTMLBody: template.HTML(string(svg)),
+		Nodes:    nodes,
+	})
+}
+
+// dotToSvg runs the graphviz "dot" binary to convert a DOT-format graph to
+// SVG, then trims the output so it can be embedded in an HTML page.
+func dotToSvg(dot []byte) ([]byte, error) {
+	cmd := exec.Command("dot", "-Tsvg")
+	out := &bytes.Buffer{}
+	cmd.Stdin, cmd.Stdout, cmd.Stderr = bytes.NewBuffer(dot), out, os.Stderr
+	if err := cmd.Run(); err != nil {
+		return nil, err
+	}
+
+	// Fix dot bug related to unquoted ampersands.
+	svg := bytes.Replace(out.Bytes(), []byte("&;"), []byte("&amp;;"), -1)
+
+	// Cleanup for embedding by dropping stuff before the <svg> start.
+	if pos := bytes.Index(svg, []byte("<svg")); pos >= 0 {
+		svg = svg[pos:]
+	}
+	return svg, nil
+}
+
+// top generates a web page with a table of the top entries, raising the
+// node count so the table is not truncated by the default limit.
+func (ui *webInterface) top(w http.ResponseWriter, req *http.Request) {
+	rpt, errList := ui.makeReport(w, req, []string{"top"}, func(cfg *config) {
+		cfg.NodeCount = 500
+	})
+	if rpt == nil {
+		return // error already reported
+	}
+	top, legend := report.TextItems(rpt)
+	var nodes []string
+	for _, item := range top {
+		nodes = append(nodes, item.Name)
+	}
+
+	ui.render(w, req, "top", rpt, errList, legend, webArgs{
+		Top:   top,
+		Nodes: nodes,
+	})
+}
+
+// disasm generates a web page containing disassembly for the function
+// selected by the "f" query parameter, capped at maxEntries items.
+func (ui *webInterface) disasm(w http.ResponseWriter, req *http.Request) {
+	args := []string{"disasm", req.URL.Query().Get("f")}
+	rpt, errList := ui.makeReport(w, req, args, nil)
+	if rpt == nil {
+		return // error already reported
+	}
+
+	out := &bytes.Buffer{}
+	if err := report.PrintAssembly(out, rpt, ui.options.Obj, maxEntries); err != nil {
+		http.Error(w, err.Error(), http.StatusBadRequest)
+		ui.options.UI.PrintErr(err)
+		return
+	}
+
+	legend := report.ProfileLabels(rpt)
+	ui.render(w, req, "plaintext", rpt, errList, legend, webArgs{
+		TextBody: out.String(),
+	})
+
+}
+
+// source generates a web page containing source code annotated with profile
+// data for the function selected by the "f" query parameter.
+func (ui *webInterface) source(w http.ResponseWriter, req *http.Request) {
+	args := []string{"weblist", req.URL.Query().Get("f")}
+	rpt, errList := ui.makeReport(w, req, args, nil)
+	if rpt == nil {
+		return // error already reported
+	}
+
+	// Generate source listing.
+	var body bytes.Buffer
+	if err := report.PrintWebList(&body, rpt, ui.options.Obj, maxEntries); err != nil {
+		http.Error(w, err.Error(), http.StatusBadRequest)
+		ui.options.UI.PrintErr(err)
+		return
+	}
+
+	legend := report.ProfileLabels(rpt)
+	ui.render(w, req, "sourcelisting", rpt, errList, legend, webArgs{
+		HTMLBody: template.HTML(body.String()),
+	})
+}
+
+// peek generates a web page listing callers/callees of the function
+// selected by the "f" query parameter, at line granularity.
+func (ui *webInterface) peek(w http.ResponseWriter, req *http.Request) {
+	args := []string{"peek", req.URL.Query().Get("f")}
+	rpt, errList := ui.makeReport(w, req, args, func(cfg *config) {
+		cfg.Granularity = "lines"
+	})
+	if rpt == nil {
+		return // error already reported
+	}
+
+	out := &bytes.Buffer{}
+	if err := report.Generate(out, rpt, ui.options.Obj); err != nil {
+		http.Error(w, err.Error(), http.StatusBadRequest)
+		ui.options.UI.PrintErr(err)
+		return
+	}
+
+	legend := report.ProfileLabels(rpt)
+	ui.render(w, req, "plaintext", rpt, errList, legend, webArgs{
+		TextBody: out.String(),
+	})
+}
+
+// saveConfig saves URL configuration. On success it writes no body;
+// on failure it reports a 400 with the error text.
+func (ui *webInterface) saveConfig(w http.ResponseWriter, req *http.Request) {
+	if err := setConfig(ui.settingsFile, *req.URL); err != nil {
+		http.Error(w, err.Error(), http.StatusBadRequest)
+		ui.options.UI.PrintErr(err)
+		return
+	}
+}
+
+// deleteConfig deletes the configuration named by the "config" query
+// parameter from the settings file.
+func (ui *webInterface) deleteConfig(w http.ResponseWriter, req *http.Request) {
+	name := req.URL.Query().Get("config")
+	if err := removeConfig(ui.settingsFile, name); err != nil {
+		http.Error(w, err.Error(), http.StatusBadRequest)
+		ui.options.UI.PrintErr(err)
+		return
+	}
+}
+
+// getFromLegend returns the suffix of the first entry in legend that starts
+// with param. It returns def if no such entry is found.
+func getFromLegend(legend []string, param, def string) string {
+	for _, s := range legend {
+		if strings.HasPrefix(s, param) {
+			return s[len(param):]
+		}
+	}
+	return def
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/elfexec/elfexec.go b/src/cmd/vendor/github.com/google/pprof/internal/elfexec/elfexec.go
new file mode 100644
index 0000000..718481b
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/elfexec/elfexec.go
@@ -0,0 +1,383 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package elfexec provides utility routines to examine ELF binaries.
+package elfexec
+
+import (
+ "bufio"
+ "debug/elf"
+ "encoding/binary"
+ "fmt"
+ "io"
+)
+
+const (
+	maxNoteSize        = 1 << 20 // in bytes; sanity cap on note name/desc sizes
+	noteTypeGNUBuildID = 3       // NT_GNU_BUILD_ID
+)
+
+// elfNote is the payload of a Note Section in an ELF file.
+type elfNote struct {
+	Name string // Contents of the "name" field, omitting the trailing zero byte.
+	Desc []byte // Contents of the "desc" field.
+	Type uint32 // Contents of the "type" field.
+}
+
+// parseNotes returns the notes from a SHT_NOTE section or PT_NOTE segment.
+// alignment is the section/segment alignment used to pad the name and desc
+// fields; order is the file's byte order for the 4-byte header words.
+func parseNotes(reader io.Reader, alignment int, order binary.ByteOrder) ([]elfNote, error) {
+	r := bufio.NewReader(reader)
+
+	// padding returns the number of bytes required to pad the given size to an
+	// alignment boundary.
+	padding := func(size int) int {
+		return ((size + (alignment - 1)) &^ (alignment - 1)) - size
+	}
+
+	var notes []elfNote
+	for {
+		noteHeader := make([]byte, 12) // 3 4-byte words
+		if _, err := io.ReadFull(r, noteHeader); err == io.EOF {
+			break
+		} else if err != nil {
+			return nil, err
+		}
+		namesz := order.Uint32(noteHeader[0:4])
+		descsz := order.Uint32(noteHeader[4:8])
+		typ := order.Uint32(noteHeader[8:12])
+
+		if uint64(namesz) > uint64(maxNoteSize) {
+			return nil, fmt.Errorf("note name too long (%d bytes)", namesz)
+		}
+		var name string
+		if namesz > 0 {
+			// Documentation differs as to whether namesz is meant to include the
+			// trailing zero, but everyone agrees that name is null-terminated.
+			// So we'll just determine the actual length after the fact.
+			var err error
+			name, err = r.ReadString('\x00')
+			if err == io.EOF {
+				return nil, fmt.Errorf("missing note name (want %d bytes)", namesz)
+			} else if err != nil {
+				return nil, err
+			}
+			namesz = uint32(len(name))
+			name = name[:len(name)-1] // strip the terminating NUL
+		}
+
+		// Drop padding bytes until the desc field.
+		for n := padding(len(noteHeader) + int(namesz)); n > 0; n-- {
+			if _, err := r.ReadByte(); err == io.EOF {
+				return nil, fmt.Errorf(
+					"missing %d bytes of padding after note name", n)
+			} else if err != nil {
+				return nil, err
+			}
+		}
+
+		if uint64(descsz) > uint64(maxNoteSize) {
+			return nil, fmt.Errorf("note desc too long (%d bytes)", descsz)
+		}
+		desc := make([]byte, int(descsz))
+		if _, err := io.ReadFull(r, desc); err == io.EOF {
+			return nil, fmt.Errorf("missing desc (want %d bytes)", len(desc))
+		} else if err != nil {
+			return nil, err
+		}
+
+		notes = append(notes, elfNote{Name: name, Desc: desc, Type: typ})
+
+		// Drop padding bytes until the next note or the end of the section,
+		// whichever comes first.
+		for n := padding(len(desc)); n > 0; n-- {
+			if _, err := r.ReadByte(); err == io.EOF {
+				// We hit the end of the section before an alignment boundary.
+				// This can happen if this section is at the end of the file or the next
+				// section has a smaller alignment requirement.
+				break
+			} else if err != nil {
+				return nil, err
+			}
+		}
+	}
+	return notes, nil
+}
+
+// GetBuildID returns the GNU build-ID for an ELF binary.
+//
+// PT_NOTE program segments are searched first, then SHT_NOTE sections.
+// If no build-ID was found but the binary was read without error, it returns
+// (nil, nil). Finding more than one build-ID is an error.
+func GetBuildID(binary io.ReaderAt) ([]byte, error) {
+	f, err := elf.NewFile(binary)
+	if err != nil {
+		return nil, err
+	}
+
+	findBuildID := func(notes []elfNote) ([]byte, error) {
+		var buildID []byte
+		for _, note := range notes {
+			if note.Name == "GNU" && note.Type == noteTypeGNUBuildID {
+				if buildID == nil {
+					buildID = note.Desc
+				} else {
+					return nil, fmt.Errorf("multiple build ids found, don't know which to use")
+				}
+			}
+		}
+		return buildID, nil
+	}
+
+	for _, p := range f.Progs {
+		if p.Type != elf.PT_NOTE {
+			continue
+		}
+		notes, err := parseNotes(p.Open(), int(p.Align), f.ByteOrder)
+		if err != nil {
+			return nil, err
+		}
+		if b, err := findBuildID(notes); b != nil || err != nil {
+			return b, err
+		}
+	}
+	for _, s := range f.Sections {
+		if s.Type != elf.SHT_NOTE {
+			continue
+		}
+		notes, err := parseNotes(s.Open(), int(s.Addralign), f.ByteOrder)
+		if err != nil {
+			return nil, err
+		}
+		if b, err := findBuildID(notes); b != nil || err != nil {
+			return b, err
+		}
+	}
+	return nil, nil
+}
+
+// kernelBase calculates the base for kernel mappings, which usually require
+// special handling. For kernel mappings, tools (like perf) use the address of
+// the kernel relocation symbol (_text or _stext) as the mmap start. Additionally,
+// for obfuscation, ChromeOS profiles have the kernel image remapped to the 0-th page.
+// It returns (base, true) when one of the kernel heuristics matches, and
+// (0, false) when none does.
+func kernelBase(loadSegment *elf.ProgHeader, stextOffset *uint64, start, limit, offset uint64) (uint64, bool) {
+	const (
+		// PAGE_OFFSET for PowerPC64, see arch/powerpc/Kconfig in the kernel sources.
+		pageOffsetPpc64 = 0xc000000000000000
+		pageSize        = 4096
+	)
+
+	if loadSegment.Vaddr == start-offset {
+		return offset, true
+	}
+	if start == 0 && limit != 0 && stextOffset != nil {
+		// ChromeOS remaps its kernel to 0. Nothing else should come
+		// down this path. Empirical values:
+		//       VADDR=0xffffffff80200000
+		// stextOffset=0xffffffff80200198
+		return start - *stextOffset, true
+	}
+	if start >= loadSegment.Vaddr && limit > start && (offset == 0 || offset == pageOffsetPpc64 || offset == start) {
+		// Some kernels look like:
+		//       VADDR=0xffffffff80200000
+		// stextOffset=0xffffffff80200198
+		//       Start=0xffffffff83200000
+		//       Limit=0xffffffff84200000
+		//      Offset=0 (0xc000000000000000 for PowerPC64) (== Start for ASLR kernel)
+		// So the base should be:
+		if stextOffset != nil && (start%pageSize) == (*stextOffset%pageSize) {
+			// perf uses the address of _stext as start. Some tools may
+			// adjust for this before calling GetBase, in which case the page
+			// alignment should be different from that of stextOffset.
+			return start - *stextOffset, true
+		}
+
+		return start - loadSegment.Vaddr, true
+	}
+	if start%pageSize != 0 && stextOffset != nil && *stextOffset%pageSize == start%pageSize {
+		// ChromeOS remaps its kernel to 0 + start%pageSize. Nothing
+		// else should come down this path. Empirical values:
+		//       start=0x198 limit=0x2f9fffff offset=0
+		//       VADDR=0xffffffff81000000
+		// stextOffset=0xffffffff81000198
+		return start - *stextOffset, true
+	}
+	return 0, false
+}
+
+// GetBase determines the base address to subtract from virtual
+// address to get symbol table address. For an executable, the base
+// is 0. Otherwise, it's a shared library, and the base is the
+// address where the mapping starts. The kernel needs special handling.
+func GetBase(fh *elf.FileHeader, loadSegment *elf.ProgHeader, stextOffset *uint64, start, limit, offset uint64) (uint64, error) {
+
+	if start == 0 && offset == 0 && (limit == ^uint64(0) || limit == 0) {
+		// Some tools may introduce a fake mapping that spans the entire
+		// address space. Assume that the address has already been
+		// adjusted, so no additional base adjustment is necessary.
+		return 0, nil
+	}
+
+	switch fh.Type {
+	case elf.ET_EXEC:
+		if loadSegment == nil {
+			// Assume fixed-address executable and so no adjustment.
+			return 0, nil
+		}
+		if stextOffset == nil && start > 0 && start < 0x8000000000000000 {
+			// A regular user-mode executable. Compute the base offset using same
+			// arithmetics as in ET_DYN case below, see the explanation there.
+			// Ideally, the condition would just be "stextOffset == nil" as that
+			// represents the address of _stext symbol in the vmlinux image. Alas,
+			// the caller may skip reading it from the binary (it's expensive to scan
+			// all the symbols) and so it may be nil even for the kernel executable.
+			// So additionally check that the start is within the user-mode half of
+			// the 64-bit address space.
+			return start - offset + loadSegment.Off - loadSegment.Vaddr, nil
+		}
+		// Various kernel heuristics and cases are handled separately.
+		if base, match := kernelBase(loadSegment, stextOffset, start, limit, offset); match {
+			return base, nil
+		}
+		// ChromeOS can remap its kernel to 0, and the caller might have not found
+		// the _stext symbol. Split this case from kernelBase() above, since we don't
+		// want to apply it to an ET_DYN user-mode executable.
+		if start == 0 && limit != 0 && stextOffset == nil {
+			return start - loadSegment.Vaddr, nil
+		}
+
+		return 0, fmt.Errorf("don't know how to handle EXEC segment: %v start=0x%x limit=0x%x offset=0x%x", *loadSegment, start, limit, offset)
+	case elf.ET_REL:
+		// Relocatable object: addresses are section-relative, so the mapping
+		// start itself is the base.
+		if offset != 0 {
+			return 0, fmt.Errorf("don't know how to handle mapping.Offset")
+		}
+		return start, nil
+	case elf.ET_DYN:
+		// The process mapping information, start = start of virtual address range,
+		// and offset = offset in the executable file of the start address, tells us
+		// that a runtime virtual address x maps to a file offset
+		// fx = x - start + offset.
+		if loadSegment == nil {
+			return start - offset, nil
+		}
+		// Kernels compiled as PIE can be ET_DYN as well. Use heuristic, similar to
+		// the ET_EXEC case above.
+		if base, match := kernelBase(loadSegment, stextOffset, start, limit, offset); match {
+			return base, nil
+		}
+		// The program header, if not nil, indicates the offset in the file where
+		// the executable segment is located (loadSegment.Off), and the base virtual
+		// address where the first byte of the segment is loaded
+		// (loadSegment.Vaddr). A file offset fx maps to a virtual (symbol) address
+		// sx = fx - loadSegment.Off + loadSegment.Vaddr.
+		//
+		// Thus, a runtime virtual address x maps to a symbol address
+		// sx = x - start + offset - loadSegment.Off + loadSegment.Vaddr.
+		return start - offset + loadSegment.Off - loadSegment.Vaddr, nil
+	}
+	return 0, fmt.Errorf("don't know how to handle FileHeader.Type %v", fh.Type)
+}
+
+// FindTextProgHeader finds the program segment header containing the .text
+// section or nil if the segment cannot be found. The first executable
+// PT_LOAD segment whose address range contains .text's address is returned.
+func FindTextProgHeader(f *elf.File) *elf.ProgHeader {
+	for _, s := range f.Sections {
+		if s.Name == ".text" {
+			// Find the LOAD segment containing the .text section.
+			for _, p := range f.Progs {
+				if p.Type == elf.PT_LOAD && p.Flags&elf.PF_X != 0 && s.Addr >= p.Vaddr && s.Addr < p.Vaddr+p.Memsz {
+					return &p.ProgHeader
+				}
+			}
+		}
+	}
+	return nil
+}
+
+// ProgramHeadersForMapping returns the program segment headers that overlap
+// the runtime mapping with file offset mapOff and memory size mapSz. We skip
+// over segments with zero file size because their file offset values are
+// unreliable. Even if overlapping, a segment is not selected if its aligned
+// file offset is greater than the mapping file offset, or if the mapping
+// includes the last page of the segment, but not the full segment and the
+// mapping includes additional pages after the segment end.
+// The function returns a slice of pointers to the headers in the input
+// slice, which are valid only while phdrs is not modified or discarded.
+func ProgramHeadersForMapping(phdrs []elf.ProgHeader, mapOff, mapSz uint64) []*elf.ProgHeader {
+	const (
+		// pageSize defines the virtual memory page size used by the loader. This
+		// value is dependent on the memory management unit of the CPU. The page
+		// size is 4KB virtually on all the architectures that we care about, so we
+		// define this metric as a constant. If we encounter architectures where
+		// page size is not 4KB, we must try to guess the page size on the system
+		// where the profile was collected, possibly using the architecture
+		// specified in the ELF file header.
+		pageSize       = 4096
+		pageOffsetMask = pageSize - 1
+	)
+	mapLimit := mapOff + mapSz
+	var headers []*elf.ProgHeader
+	for i := range phdrs {
+		p := &phdrs[i]
+		// Skip over segments with zero file size. Their file offsets can have
+		// arbitrary values, see b/195427553.
+		if p.Filesz == 0 {
+			continue
+		}
+		segLimit := p.Off + p.Memsz
+		// The segment must overlap the mapping.
+		if p.Type == elf.PT_LOAD && mapOff < segLimit && p.Off < mapLimit {
+			// If the mapping offset is strictly less than the page aligned segment
+			// offset, then this mapping comes from a different segment, fixes
+			// b/179920361.
+			alignedSegOffset := uint64(0)
+			if p.Off > (p.Vaddr & pageOffsetMask) {
+				alignedSegOffset = p.Off - (p.Vaddr & pageOffsetMask)
+			}
+			if mapOff < alignedSegOffset {
+				continue
+			}
+			// If the mapping starts in the middle of the segment, it covers less than
+			// one page of the segment, and it extends at least one page past the
+			// segment, then this mapping comes from a different segment.
+			if mapOff > p.Off && (segLimit < mapOff+pageSize) && (mapLimit >= segLimit+pageSize) {
+				continue
+			}
+			headers = append(headers, p)
+		}
+	}
+	return headers
+}
+
+// HeaderForFileOffset attempts to identify a unique program header that
+// includes the given file offset. It returns an error if it cannot identify a
+// unique header. Note that the match is against [Off, Off+Memsz), not Filesz,
+// which is what makes the ambiguous case below possible.
+func HeaderForFileOffset(headers []*elf.ProgHeader, fileOffset uint64) (*elf.ProgHeader, error) {
+	var ph *elf.ProgHeader
+	for _, h := range headers {
+		if fileOffset >= h.Off && fileOffset < h.Off+h.Memsz {
+			if ph != nil {
+				// Assuming no other bugs, this can only happen if we have two or
+				// more small program segments that fit on the same page, and a
+				// segment other than the last one includes uninitialized data, or
+				// if the debug binary used for symbolization is stripped of some
+				// sections, so segment file sizes are smaller than memory sizes.
+				return nil, fmt.Errorf("found second program header (%#v) that matches file offset %x, first program header is %#v. Is this a stripped binary, or does the first program segment contain uninitialized data?", *h, fileOffset, *ph)
+			}
+			ph = h
+		}
+	}
+	if ph == nil {
+		return nil, fmt.Errorf("no program header matches file offset %x", fileOffset)
+	}
+	return ph, nil
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/graph/dotgraph.go b/src/cmd/vendor/github.com/google/pprof/internal/graph/dotgraph.go
new file mode 100644
index 0000000..09d40fd
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/graph/dotgraph.go
@@ -0,0 +1,494 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package graph
+
+import (
+ "fmt"
+ "io"
+ "math"
+ "path/filepath"
+ "strings"
+
+ "github.com/google/pprof/internal/measurement"
+)
+
// DotAttributes contains details about the graph itself, giving
// insight into how its elements should be rendered.
type DotAttributes struct {
	Nodes map[*Node]*DotNodeAttributes // A map allowing each Node to have its own visualization option
}

// DotNodeAttributes contains Node specific visualization options.
// A nil entry (or nil field) means "use the default rendering".
type DotNodeAttributes struct {
	Shape       string                 // The optional shape of the node when rendered visually
	Bold        bool                   // If the node should be bold or not
	Peripheries int                    // An optional number of borders to place around a node
	URL         string                 // An optional url link to add to a node
	Formatter   func(*NodeInfo) string // An optional formatter for the node's label
}

// DotConfig contains attributes about how a graph should be
// constructed and how it should look.
type DotConfig struct {
	Title     string   // The title of the DOT graph
	LegendURL string   // The URL to link to from the legend.
	Labels    []string // The labels for the DOT's legend

	FormatValue func(int64) string // A formatting function for values
	Total       int64              // The total weight of the graph, used to compute percentages
}

// maxNodelets caps how many nodelets are shown per label kind
// (both numeric and non-numeric).
const maxNodelets = 4 // Number of nodelets for labels (both numeric and non)
+
// ComposeDot creates and writes a graph in the DOT format to the writer,
// using the configurations given.
func ComposeDot(w io.Writer, g *Graph, a *DotAttributes, c *DotConfig) {
	builder := &builder{w, a, c}

	// Begin constructing DOT by adding a title and legend.
	builder.start()
	defer builder.finish()
	builder.addLegend()

	if len(g.Nodes) == 0 {
		return
	}

	// Preprocess graph to get id map and find max flat.
	nodeIDMap := make(map[*Node]int)
	hasNodelets := make(map[*Node]bool)

	maxFlat := float64(abs64(g.Nodes[0].FlatValue()))
	for i, n := range g.Nodes {
		nodeIDMap[n] = i + 1
		if float64(abs64(n.FlatValue())) > maxFlat {
			maxFlat = float64(abs64(n.FlatValue()))
		}
	}

	edges := EdgeMap{}

	// Add nodes and nodelets to DOT builder.
	for _, n := range g.Nodes {
		builder.addNode(n, nodeIDMap[n], maxFlat)
		hasNodelets[n] = builder.addNodelets(n, nodeIDMap[n])

		// Collect all edges. Use a fake node to support multiple incoming edges.
		// (EdgeMap is keyed by *Node; a fresh placeholder per edge guarantees
		// every edge gets its own distinct key.)
		for _, e := range n.Out {
			edges[&Node{}] = e
		}
	}

	// Add edges to DOT builder. Sort edges by frequency as a hint to the graph layout engine.
	for _, e := range edges.Sort() {
		builder.addEdge(e, nodeIDMap[e.Src], nodeIDMap[e.Dest], hasNodelets[e.Src])
	}
}
+
// builder wraps an io.Writer and understands how to compose DOT formatted elements.
// All add* methods write directly to the embedded writer.
type builder struct {
	io.Writer
	attributes *DotAttributes
	config     *DotConfig
}
+
+// start generates a title and initial node in DOT format.
+func (b *builder) start() {
+ graphname := "unnamed"
+ if b.config.Title != "" {
+ graphname = b.config.Title
+ }
+ fmt.Fprintln(b, `digraph "`+graphname+`" {`)
+ fmt.Fprintln(b, `node [style=filled fillcolor="#f8f8f8"]`)
+}
+
+// finish closes the opening curly bracket in the constructed DOT buffer.
+func (b *builder) finish() {
+ fmt.Fprintln(b, "}")
+}
+
// addLegend generates a legend in DOT format. The first label becomes the
// legend node's display name; all labels go into the node's multi-line
// label, left-justified with \l.
func (b *builder) addLegend() {
	labels := b.config.Labels
	if len(labels) == 0 {
		return
	}
	title := labels[0]
	fmt.Fprintf(b, `subgraph cluster_L { "%s" [shape=box fontsize=16`, escapeForDot(title))
	fmt.Fprintf(b, ` label="%s\l"`, strings.Join(escapeAllForDot(labels), `\l`))
	if b.config.LegendURL != "" {
		fmt.Fprintf(b, ` URL="%s" target="_blank"`, b.config.LegendURL)
	}
	if b.config.Title != "" {
		fmt.Fprintf(b, ` tooltip="%s"`, b.config.Title)
	}
	fmt.Fprintf(b, "] }\n")
}
+
// addNode generates a graph node in DOT format: a label carrying the
// name plus flat/cum values, a font size scaled by flat weight relative
// to maxFlat, and colors derived from the node's share of config.Total.
func (b *builder) addNode(node *Node, nodeID int, maxFlat float64) {
	flat, cum := node.FlatValue(), node.CumValue()
	attrs := b.attributes.Nodes[node]

	// Populate label for node.
	var label string
	if attrs != nil && attrs.Formatter != nil {
		label = attrs.Formatter(&node.Info)
	} else {
		label = multilinePrintableName(&node.Info)
	}

	flatValue := b.config.FormatValue(flat)
	if flat != 0 {
		label = label + fmt.Sprintf(`%s (%s)`,
			flatValue,
			strings.TrimSpace(measurement.Percentage(flat, b.config.Total)))
	} else {
		label = label + "0"
	}
	cumValue := flatValue
	if cum != flat {
		if flat != 0 {
			label = label + `\n`
		} else {
			label = label + " "
		}
		cumValue = b.config.FormatValue(cum)
		label = label + fmt.Sprintf(`of %s (%s)`,
			cumValue,
			strings.TrimSpace(measurement.Percentage(cum, b.config.Total)))
	}

	// Scale font sizes from 8 to 24 based on percentage of flat frequency.
	// Use non linear growth to emphasize the size difference.
	baseFontSize, maxFontGrowth := 8, 16.0
	fontSize := baseFontSize
	if maxFlat != 0 && flat != 0 && float64(abs64(flat)) <= maxFlat {
		fontSize += int(math.Ceil(maxFontGrowth * math.Sqrt(float64(abs64(flat))/maxFlat)))
	}

	// Determine node shape.
	shape := "box"
	if attrs != nil && attrs.Shape != "" {
		shape = attrs.Shape
	}

	// Create DOT attribute for node.
	attr := fmt.Sprintf(`label="%s" id="node%d" fontsize=%d shape=%s tooltip="%s (%s)" color="%s" fillcolor="%s"`,
		label, nodeID, fontSize, shape, escapeForDot(node.Info.PrintableName()), cumValue,
		dotColor(float64(node.CumValue())/float64(abs64(b.config.Total)), false),
		dotColor(float64(node.CumValue())/float64(abs64(b.config.Total)), true))

	// Add on extra attributes if provided.
	if attrs != nil {
		// Make bold if specified.
		if attrs.Bold {
			attr += ` style="bold,filled"`
		}

		// Add peripheries if specified.
		if attrs.Peripheries != 0 {
			attr += fmt.Sprintf(` peripheries=%d`, attrs.Peripheries)
		}

		// Add URL if specified. target="_blank" forces the link to open in a new tab.
		if attrs.URL != "" {
			attr += fmt.Sprintf(` URL="%s" target="_blank"`, attrs.URL)
		}
	}

	fmt.Fprintf(b, "N%d [%s]\n", nodeID, attr)
}
+
// addNodelets generates the DOT boxes for the node tags if they exist.
// It returns true if any nodelet output was written, so callers can add
// extra spacing below tagged nodes.
func (b *builder) addNodelets(node *Node, nodeID int) bool {
	var nodelets string

	// Populate two Tag slices, one for LabelTags and one for NumericTags.
	var ts []*Tag
	lnts := make(map[string][]*Tag)
	for _, t := range node.LabelTags {
		ts = append(ts, t)
	}
	for l, tm := range node.NumericTags {
		for _, t := range tm {
			lnts[l] = append(lnts[l], t)
		}
	}

	// For leaf nodes, print cumulative tags (includes weight from
	// children that have been deleted).
	// For internal nodes, print only flat tags.
	flatTags := len(node.Out) > 0

	// Select the top maxNodelets alphanumeric labels by weight.
	SortTags(ts, flatTags)
	if len(ts) > maxNodelets {
		ts = ts[:maxNodelets]
	}
	for i, t := range ts {
		w := t.CumValue()
		if flatTags {
			w = t.FlatValue()
		}
		if w == 0 {
			continue
		}
		weight := b.config.FormatValue(w)
		nodelets += fmt.Sprintf(`N%d_%d [label = "%s" id="N%d_%d" fontsize=8 shape=box3d tooltip="%s"]`+"\n", nodeID, i, t.Name, nodeID, i, weight)
		nodelets += fmt.Sprintf(`N%d -> N%d_%d [label=" %s" weight=100 tooltip="%s" labeltooltip="%s"]`+"\n", nodeID, nodeID, i, weight, weight, weight)
		// Numeric tags associated with this label tag hang off its nodelet.
		if nts := lnts[t.Name]; nts != nil {
			nodelets += b.numericNodelets(nts, maxNodelets, flatTags, fmt.Sprintf(`N%d_%d`, nodeID, i))
		}
	}

	// Numeric tags with no associated label tag hang off the node itself.
	if nts := lnts[""]; nts != nil {
		nodelets += b.numericNodelets(nts, maxNodelets, flatTags, fmt.Sprintf(`N%d`, nodeID))
	}

	fmt.Fprint(b, nodelets)
	return nodelets != ""
}
+
// numericNodelets renders up to maxNumNodelets buckets of numeric tags as
// box3d nodelets hanging off source, returning the generated DOT text.
// Residual (cum-only) weights get a dotted edge.
func (b *builder) numericNodelets(nts []*Tag, maxNumNodelets int, flatTags bool, source string) string {
	nodelets := ""

	// Collapse numeric labels into maxNumNodelets buckets, of the form:
	// 1MB..2MB, 3MB..5MB, ...
	for j, t := range b.collapsedTags(nts, maxNumNodelets, flatTags) {
		w, attr := t.CumValue(), ` style="dotted"`
		if flatTags || t.FlatValue() == t.CumValue() {
			w, attr = t.FlatValue(), ""
		}
		if w != 0 {
			weight := b.config.FormatValue(w)
			nodelets += fmt.Sprintf(`N%s_%d [label = "%s" id="N%s_%d" fontsize=8 shape=box3d tooltip="%s"]`+"\n", source, j, t.Name, source, j, weight)
			nodelets += fmt.Sprintf(`%s -> N%s_%d [label=" %s" weight=100 tooltip="%s" labeltooltip="%s"%s]`+"\n", source, source, j, weight, weight, weight, attr)
		}
	}
	return nodelets
}
+
// addEdge generates a graph edge in DOT format. Edge weight/penwidth/color
// scale with the edge's share of config.Total; residual edges are dotted.
func (b *builder) addEdge(edge *Edge, from, to int, hasNodelets bool) {
	var inline string
	if edge.Inline {
		inline = `\n (inline)`
	}
	w := b.config.FormatValue(edge.WeightValue())
	attr := fmt.Sprintf(`label=" %s%s"`, w, inline)
	if b.config.Total != 0 {
		// Note: edge.weight > b.config.Total is possible for profile diffs.
		if weight := 1 + int(min64(abs64(edge.WeightValue()*100/b.config.Total), 100)); weight > 1 {
			attr = fmt.Sprintf(`%s weight=%d`, attr, weight)
		}
		if width := 1 + int(min64(abs64(edge.WeightValue()*5/b.config.Total), 5)); width > 1 {
			attr = fmt.Sprintf(`%s penwidth=%d`, attr, width)
		}
		attr = fmt.Sprintf(`%s color="%s"`, attr,
			dotColor(float64(edge.WeightValue())/float64(abs64(b.config.Total)), false))
	}
	// The arrow shown in the tooltip text distinguishes residual edges.
	arrow := "->"
	if edge.Residual {
		arrow = "..."
	}
	tooltip := fmt.Sprintf(`"%s %s %s (%s)"`,
		escapeForDot(edge.Src.Info.PrintableName()), arrow,
		escapeForDot(edge.Dest.Info.PrintableName()), w)
	attr = fmt.Sprintf(`%s tooltip=%s labeltooltip=%s`, attr, tooltip, tooltip)

	if edge.Residual {
		attr = attr + ` style="dotted"`
	}

	if hasNodelets {
		// Separate children further if source has tags.
		attr = attr + " minlen=2"
	}

	fmt.Fprintf(b, "N%d -> N%d [%s]\n", from, to, attr)
}
+
// dotColor returns a color for the given score (between -1.0 and
// 1.0), with -1.0 colored green, 0.0 colored grey, and 1.0 colored
// red. If isBackground is true, then a light (low-saturation)
// color is returned (suitable for use as a background color);
// otherwise, a darker color is returned (suitable for use as a
// foreground color).
func dotColor(score float64, isBackground bool) string {
	// A float between 0.0 and 1.0, indicating the extent to which
	// colors should be shifted away from grey (to make positive and
	// negative values easier to distinguish, and to make more use of
	// the color range.)
	const shift = 0.7

	// Saturation and value (in hsv colorspace): light for backgrounds,
	// darker for foregrounds.
	saturation, value := 1.0, 0.7
	if isBackground {
		saturation, value = 0.1, 0.93
	}

	// Limit the score values to the range [-1.0, 1.0].
	score = math.Max(-1.0, math.Min(1.0, score))

	// Reduce saturation near score=0 (so it is colored grey, rather than yellow).
	if math.Abs(score) < 0.2 {
		saturation *= math.Abs(score) / 0.2
	}

	// Apply 'shift' to move scores away from 0.0 (grey).
	switch {
	case score > 0.0:
		score = math.Pow(score, 1.0-shift)
	case score < 0.0:
		score = -math.Pow(-score, 1.0-shift)
	}

	// Convert (score, saturation, value) to RGB: full red or green channel
	// depending on sign, the other channel reduced by saturation*|score|.
	r, g := value, value
	if score < 0.0 {
		r = value * (1 + saturation*score)
	} else {
		g = value * (1 - saturation*score)
	}
	b := value * (1 - saturation)
	return fmt.Sprintf("#%02x%02x%02x", uint8(r*255.0), uint8(g*255.0), uint8(b*255.0))
}
+
+func multilinePrintableName(info *NodeInfo) string {
+ infoCopy := *info
+ infoCopy.Name = escapeForDot(ShortenFunctionName(infoCopy.Name))
+ infoCopy.Name = strings.Replace(infoCopy.Name, "::", `\n`, -1)
+ // Go type parameters are reported as "[...]" by Go pprof profiles.
+ // Keep this ellipsis rather than replacing with newlines below.
+ infoCopy.Name = strings.Replace(infoCopy.Name, "[...]", "[…]", -1)
+ infoCopy.Name = strings.Replace(infoCopy.Name, ".", `\n`, -1)
+ if infoCopy.File != "" {
+ infoCopy.File = filepath.Base(infoCopy.File)
+ }
+ return strings.Join(infoCopy.NameComponents(), `\n`) + `\n`
+}
+
// collapsedTags trims and sorts a slice of tags. If there are more than
// count tags, the top count tags seed one group each, every remaining tag
// joins the group with the nearest seed value, and each group is collapsed
// into a single range-labeled tag.
func (b *builder) collapsedTags(ts []*Tag, count int, flatTags bool) []*Tag {
	ts = SortTags(ts, flatTags)
	if len(ts) <= count {
		return ts
	}

	tagGroups := make([][]*Tag, count)
	for i, t := range (ts)[:count] {
		tagGroups[i] = []*Tag{t}
	}
	for _, t := range (ts)[count:] {
		g, d := 0, tagDistance(t, tagGroups[0][0])
		for i := 1; i < count; i++ {
			if nd := tagDistance(t, tagGroups[i][0]); nd < d {
				g, d = i, nd
			}
		}
		tagGroups[g] = append(tagGroups[g], t)
	}

	var nts []*Tag
	for _, g := range tagGroups {
		l, w, c := b.tagGroupLabel(g)
		nts = append(nts, &Tag{
			Name: l,
			Flat: w,
			Cum:  c,
		})
	}
	return SortTags(nts, flatTags)
}
+
+func tagDistance(t, u *Tag) float64 {
+ v, _ := measurement.Scale(u.Value, u.Unit, t.Unit)
+ if v < float64(t.Value) {
+ return float64(t.Value) - v
+ }
+ return v - float64(t.Value)
+}
+
// tagGroupLabel returns the label ("min..max" value range) and the summed
// flat and cumulative weights (averaged when divisors are present) for a
// group of tags produced by collapsedTags.
func (b *builder) tagGroupLabel(g []*Tag) (label string, flat, cum int64) {
	if len(g) == 1 {
		t := g[0]
		return measurement.Label(t.Value, t.Unit), t.FlatValue(), t.CumValue()
	}
	min := g[0]
	max := g[0]
	df, f := min.FlatDiv, min.Flat
	dc, c := min.CumDiv, min.Cum
	for _, t := range g[1:] {
		// Compare values in the current extremum's unit.
		if v, _ := measurement.Scale(t.Value, t.Unit, min.Unit); int64(v) < min.Value {
			min = t
		}
		if v, _ := measurement.Scale(t.Value, t.Unit, max.Unit); int64(v) > max.Value {
			max = t
		}
		f += t.Flat
		df += t.FlatDiv
		c += t.Cum
		dc += t.CumDiv
	}
	if df != 0 {
		f = f / df
	}
	if dc != 0 {
		c = c / dc
	}

	// Tags are not scaled with the selected output unit because tags are often
	// much smaller than other values which appear, so the range of tag sizes
	// sometimes would appear to be "0..0" when scaled to the selected output unit.
	return measurement.Label(min.Value, min.Unit) + ".." + measurement.Label(max.Value, max.Unit), f, c
}
+
// min64 returns the smaller of two int64 values.
func min64(a, b int64) int64 {
	if b < a {
		return b
	}
	return a
}
+
// escapeAllForDot applies escapeForDot to every element of in, returning
// a new slice of the same length.
func escapeAllForDot(in []string) []string {
	out := make([]string, len(in))
	for i, s := range in {
		out[i] = escapeForDot(s)
	}
	return out
}

// escapeForDot escapes double quotes and backslashes, and replaces Graphviz's
// "center" character (\n) with a left-justified character.
// See https://graphviz.org/docs/attr-types/escString/ for more info.
func escapeForDot(str string) string {
	// A single-pass Replacer is equivalent here because no replacement's
	// output contains another pattern's input.
	return strings.NewReplacer(`\`, `\\`, `"`, `\"`, "\n", `\l`).Replace(str)
}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/graph/graph.go b/src/cmd/vendor/github.com/google/pprof/internal/graph/graph.go
new file mode 100644
index 0000000..b64ef27
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/graph/graph.go
@@ -0,0 +1,1170 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package graph collects a set of samples into a directed graph.
+package graph
+
+import (
+ "fmt"
+ "math"
+ "path/filepath"
+ "regexp"
+ "sort"
+ "strconv"
+ "strings"
+
+ "github.com/google/pprof/profile"
+)
+
var (
	// Removes package name and method arguments for Java method names.
	// See tests for examples.
	javaRegExp = regexp.MustCompile(`^(?:[a-z]\w*\.)*([A-Z][\w\$]*\.(?:<init>|[a-z][\w\$]*(?:\$\d+)?))(?:(?:\()|$)`)
	// Removes package name and method arguments for Go function names.
	// See tests for examples.
	goRegExp = regexp.MustCompile(`^(?:[\w\-\.]+\/)+([^.]+\..+)`)
	// Removes potential module versions in a package path, e.g. "pkg/v2/...".
	goVerRegExp = regexp.MustCompile(`^(.*?)/v(?:[2-9]|[1-9][0-9]+)([./].*)$`)
	// Strips C++ namespace prefix from a C++ function / method name.
	// NOTE: Make sure to keep the template parameters in the name. Normally,
	// template parameters are stripped from the C++ names but when
	// -symbolize=demangle=templates flag is used, they will not be.
	// See tests for examples.
	cppRegExp                = regexp.MustCompile(`^(?:[_a-zA-Z]\w*::)+(_*[A-Z]\w*::~?[_a-zA-Z]\w*(?:<.*>)?)`)
	cppAnonymousPrefixRegExp = regexp.MustCompile(`^\(anonymous namespace\)::`)
)
+
// Graph summarizes a performance profile into a format that is
// suitable for visualization.
type Graph struct {
	Nodes Nodes
}

// Options encodes the options for constructing a graph.
type Options struct {
	SampleValue       func(s []int64) int64      // Function to compute the value of a sample
	SampleMeanDivisor func(s []int64) int64      // Function to compute the divisor for mean graphs, or nil
	FormatTag         func(int64, string) string // Function to format a sample tag value into a string
	ObjNames          bool                       // Always preserve obj filename
	OrigFnNames       bool                       // Preserve original (eg mangled) function names

	CallTree     bool // Build a tree instead of a graph
	DropNegative bool // Drop nodes with overall negative values

	KeptNodes NodeSet // If non-nil, only use nodes in this set
}

// Nodes is an ordered collection of graph nodes.
type Nodes []*Node
+
// Node is an entry on a profiling report. It represents a unique
// program location.
type Node struct {
	// Info describes the source location associated to this node.
	Info NodeInfo

	// Function represents the function that this node belongs to. On
	// graphs with sub-function resolution (eg line number or
	// addresses), two nodes in a NodeMap that are part of the same
	// function have the same value of Node.Function. If the Node
	// represents the whole function, it points back to itself.
	Function *Node

	// Values associated to this node. Flat is exclusive to this node,
	// Cum includes all descendants.
	Flat, FlatDiv, Cum, CumDiv int64

	// In and Out contain the nodes immediately reaching or reached by
	// this node.
	In, Out EdgeMap

	// LabelTags provide additional information about subsets of a sample.
	LabelTags TagMap

	// NumericTags provide additional values for subsets of a sample.
	// Numeric tags are optionally associated to a label tag. The key
	// for NumericTags is the name of the LabelTag they are associated
	// to, or "" for numeric tags not associated to a label tag.
	NumericTags map[string]TagMap
}
+
// FlatValue returns the exclusive value for this node, computing the
// mean if a divisor is available (FlatDiv != 0).
func (n *Node) FlatValue() int64 {
	if n.FlatDiv == 0 {
		return n.Flat
	}
	return n.Flat / n.FlatDiv
}

// CumValue returns the inclusive value for this node, computing the
// mean if a divisor is available (CumDiv != 0).
func (n *Node) CumValue() int64 {
	if n.CumDiv == 0 {
		return n.Cum
	}
	return n.Cum / n.CumDiv
}
+
// AddToEdge increases the weight of an edge between two nodes. If
// there isn't such an edge one is created.
func (n *Node) AddToEdge(to *Node, v int64, residual, inline bool) {
	n.AddToEdgeDiv(to, 0, v, residual, inline)
}

// AddToEdgeDiv increases the weight of an edge between two nodes. If
// there isn't such an edge one is created. An existing edge becomes
// residual if any contribution is residual, and stays inline only if
// every contribution is inline.
func (n *Node) AddToEdgeDiv(to *Node, dv, v int64, residual, inline bool) {
	// Invariant: the same *Edge must be registered in both n.Out and to.In.
	if n.Out[to] != to.In[n] {
		panic(fmt.Errorf("asymmetric edges %v %v", *n, *to))
	}

	if e := n.Out[to]; e != nil {
		e.WeightDiv += dv
		e.Weight += v
		if residual {
			e.Residual = true
		}
		if !inline {
			e.Inline = false
		}
		return
	}

	info := &Edge{Src: n, Dest: to, WeightDiv: dv, Weight: v, Residual: residual, Inline: inline}
	n.Out[to] = info
	to.In[n] = info
}
+
// NodeInfo contains the attributes for a node.
type NodeInfo struct {
	Name              string
	OrigName          string
	Address           uint64
	File              string
	StartLine, Lineno int
	Objfile           string
}

// PrintableName returns the node's name components joined by single spaces.
func (i *NodeInfo) PrintableName() string {
	return strings.Join(i.NameComponents(), " ")
}

// NameComponents returns the components of the printable name to be used for a node:
// address (if any), function name (if any), and one location component
// chosen by the switch below.
func (i *NodeInfo) NameComponents() []string {
	var name []string
	if i.Address != 0 {
		name = append(name, fmt.Sprintf("%016x", i.Address))
	}
	if fun := i.Name; fun != "" {
		name = append(name, fun)
	}

	switch {
	case i.Lineno != 0:
		// User requested line numbers, provide what we have.
		name = append(name, fmt.Sprintf("%s:%d", i.File, i.Lineno))
	case i.File != "":
		// User requested file name, provide it.
		name = append(name, i.File)
	case i.Name != "":
		// User requested function name. It was already included.
	case i.Objfile != "":
		// Only binary name is available
		name = append(name, "["+filepath.Base(i.Objfile)+"]")
	default:
		// Do not leave it empty if there is no information at all.
		name = append(name, "<unknown>")
	}
	return name
}
+
// NodeMap maps from a node info struct to a node. It is used to merge
// report entries with the same info.
type NodeMap map[NodeInfo]*Node

// NodeSet is a collection of node info structs.
type NodeSet map[NodeInfo]bool

// NodePtrSet is a collection of nodes. Trimming a graph or tree requires a set
// of objects which uniquely identify the nodes to keep. In a graph, NodeInfo
// works as a unique identifier; however, in a tree multiple nodes may share
// identical NodeInfos. A *Node does uniquely identify a node so we can use that
// instead. Though a *Node also uniquely identifies a node in a graph,
// currently, during trimming, graphs are rebuilt from scratch using only the
// NodeSet, so there would not be the required context of the initial graph to
// allow for the use of *Node.
type NodePtrSet map[*Node]bool
+
// FindOrInsertNode takes the info for a node and either returns a matching node
// from the node map if one exists, or adds one to the map if one does not.
// If kept is non-nil, nodes are only added if they can be located on it.
// New sub-function nodes (with an address or line number) also get their
// Function field linked to the whole-function node, creating it if needed.
func (nm NodeMap) FindOrInsertNode(info NodeInfo, kept NodeSet) *Node {
	if kept != nil {
		if _, ok := kept[info]; !ok {
			return nil
		}
	}

	if n, ok := nm[info]; ok {
		return n
	}

	n := &Node{
		Info:        info,
		In:          make(EdgeMap),
		Out:         make(EdgeMap),
		LabelTags:   make(TagMap),
		NumericTags: make(map[string]TagMap),
	}
	nm[info] = n
	if info.Address == 0 && info.Lineno == 0 {
		// This node represents the whole function, so point Function
		// back to itself.
		n.Function = n
		return n
	}
	// Find a node that represents the whole function. Note: kept is not
	// consulted for this implicit whole-function node.
	info.Address = 0
	info.Lineno = 0
	n.Function = nm.FindOrInsertNode(info, nil)
	return n
}
+
// EdgeMap is used to represent the incoming/outgoing edges from a node,
// keyed by the node at the other end of the edge.
type EdgeMap map[*Node]*Edge

// Edge contains any attributes to be represented about edges in a graph.
type Edge struct {
	Src, Dest *Node
	// The summary weight of the edge
	Weight, WeightDiv int64

	// residual edges connect nodes that were connected through a
	// separate node, which has been removed from the report.
	Residual bool
	// An inline edge represents a call that was inlined into the caller.
	Inline bool
}

// WeightValue returns the weight value for this edge, normalizing if a
// divisor is available (WeightDiv != 0).
func (e *Edge) WeightValue() int64 {
	if e.WeightDiv == 0 {
		return e.Weight
	}
	return e.Weight / e.WeightDiv
}
+
// Tag represents a sample annotation.
type Tag struct {
	Name          string
	Unit          string // Describe the value, "" for non-numeric tags
	Value         int64
	Flat, FlatDiv int64
	Cum, CumDiv   int64
}

// FlatValue returns the exclusive value for this tag, computing the
// mean if a divisor is available (FlatDiv != 0).
func (t *Tag) FlatValue() int64 {
	if t.FlatDiv == 0 {
		return t.Flat
	}
	return t.Flat / t.FlatDiv
}

// CumValue returns the inclusive value for this tag, computing the
// mean if a divisor is available (CumDiv != 0).
func (t *Tag) CumValue() int64 {
	if t.CumDiv == 0 {
		return t.Cum
	}
	return t.Cum / t.CumDiv
}

// TagMap is a collection of tags, classified by their name.
type TagMap map[string]*Tag

// SortTags sorts a slice of tags in place based on their weight
// (see the tags type for the ordering) and returns the same slice.
func SortTags(t []*Tag, flat bool) []*Tag {
	ts := tags{t, flat}
	sort.Sort(ts)
	return ts.t
}
+
+// New summarizes performance data from a profile into a graph.
+func New(prof *profile.Profile, o *Options) *Graph {
+ if o.CallTree {
+ return newTree(prof, o)
+ }
+ g, _ := newGraph(prof, o)
+ return g
+}
+
// newGraph computes a graph from a profile. It returns the graph, and
// a map from the profile location indices to the corresponding graph
// nodes.
func newGraph(prof *profile.Profile, o *Options) (*Graph, map[uint64]Nodes) {
	nodes, locationMap := CreateNodes(prof, o)
	// seenNode/seenEdge are reused (cleared) per sample to avoid
	// double-counting a node or edge appearing twice in one stack.
	seenNode := make(map[*Node]bool)
	seenEdge := make(map[nodePair]bool)
	for _, sample := range prof.Sample {
		var w, dw int64
		w = o.SampleValue(sample.Value)
		if o.SampleMeanDivisor != nil {
			dw = o.SampleMeanDivisor(sample.Value)
		}
		if dw == 0 && w == 0 {
			continue
		}
		for k := range seenNode {
			delete(seenNode, k)
		}
		for k := range seenEdge {
			delete(seenEdge, k)
		}
		var parent *Node
		// A residual edge goes over one or more nodes that were not kept.
		residual := false

		labels := joinLabels(sample)
		// Group the sample frames, based on a global map.
		// Walk from the root of the stack (last location) to the leaf.
		for i := len(sample.Location) - 1; i >= 0; i-- {
			l := sample.Location[i]
			locNodes := locationMap[l.ID]
			for ni := len(locNodes) - 1; ni >= 0; ni-- {
				n := locNodes[ni]
				if n == nil {
					residual = true
					continue
				}
				// Add cum weight to all nodes in stack, avoiding double counting.
				if _, ok := seenNode[n]; !ok {
					seenNode[n] = true
					n.addSample(dw, w, labels, sample.NumLabel, sample.NumUnit, o.FormatTag, false)
				}
				// Update edge weights for all edges in stack, avoiding double counting.
				if _, ok := seenEdge[nodePair{n, parent}]; !ok && parent != nil && n != parent {
					seenEdge[nodePair{n, parent}] = true
					parent.AddToEdgeDiv(n, dw, w, residual, ni != len(locNodes)-1)
				}
				parent = n
				residual = false
			}
		}
		if parent != nil && !residual {
			// Add flat weight to leaf node.
			parent.addSample(dw, w, labels, sample.NumLabel, sample.NumUnit, o.FormatTag, true)
		}
	}

	return selectNodesForGraph(nodes, o.DropNegative), locationMap
}
+
+func selectNodesForGraph(nodes Nodes, dropNegative bool) *Graph {
+ // Collect nodes into a graph.
+ gNodes := make(Nodes, 0, len(nodes))
+ for _, n := range nodes {
+ if n == nil {
+ continue
+ }
+ if n.Cum == 0 && n.Flat == 0 {
+ continue
+ }
+ if dropNegative && isNegative(n) {
+ continue
+ }
+ gNodes = append(gNodes, n)
+ }
+ return &Graph{gNodes}
+}
+
// nodePair is a (src, dest) key used by newGraph to avoid counting the
// same edge twice within a single sample stack.
type nodePair struct {
	src, dest *Node
}
+
+func newTree(prof *profile.Profile, o *Options) (g *Graph) {
+ parentNodeMap := make(map[*Node]NodeMap, len(prof.Sample))
+ for _, sample := range prof.Sample {
+ var w, dw int64
+ w = o.SampleValue(sample.Value)
+ if o.SampleMeanDivisor != nil {
+ dw = o.SampleMeanDivisor(sample.Value)
+ }
+ if dw == 0 && w == 0 {
+ continue
+ }
+ var parent *Node
+ labels := joinLabels(sample)
+ // Group the sample frames, based on a per-node map.
+ for i := len(sample.Location) - 1; i >= 0; i-- {
+ l := sample.Location[i]
+ lines := l.Line
+ if len(lines) == 0 {
+ lines = []profile.Line{{}} // Create empty line to include location info.
+ }
+ for lidx := len(lines) - 1; lidx >= 0; lidx-- {
+ nodeMap := parentNodeMap[parent]
+ if nodeMap == nil {
+ nodeMap = make(NodeMap)
+ parentNodeMap[parent] = nodeMap
+ }
+ n := nodeMap.findOrInsertLine(l, lines[lidx], o)
+ if n == nil {
+ continue
+ }
+ n.addSample(dw, w, labels, sample.NumLabel, sample.NumUnit, o.FormatTag, false)
+ if parent != nil {
+ parent.AddToEdgeDiv(n, dw, w, false, lidx != len(lines)-1)
+ }
+ parent = n
+ }
+ }
+ if parent != nil {
+ parent.addSample(dw, w, labels, sample.NumLabel, sample.NumUnit, o.FormatTag, true)
+ }
+ }
+
+ nodes := make(Nodes, len(prof.Location))
+ for _, nm := range parentNodeMap {
+ nodes = append(nodes, nm.nodes()...)
+ }
+ return selectNodesForGraph(nodes, o.DropNegative)
+}
+
// ShortenFunctionName returns a shortened version of a function's name:
// anonymous-namespace and module-version prefixes are stripped first, then
// the first matching language pattern (Go, Java, C++) keeps only the
// trailing type/function portion. Unmatched names are returned unchanged.
func ShortenFunctionName(f string) string {
	f = cppAnonymousPrefixRegExp.ReplaceAllString(f, "")
	f = goVerRegExp.ReplaceAllString(f, `${1}${2}`)
	for _, re := range []*regexp.Regexp{goRegExp, javaRegExp, cppRegExp} {
		if matches := re.FindStringSubmatch(f); len(matches) >= 2 {
			return strings.Join(matches[1:], "")
		}
	}
	return f
}
+
// TrimTree trims a Graph in forest form, keeping only the nodes in kept. This
// will not work correctly if even a single node has multiple parents.
// Dropped nodes are spliced out: their children are re-attached to the
// dropped node's parent via residual edges.
func (g *Graph) TrimTree(kept NodePtrSet) {
	// Creates a new list of nodes
	oldNodes := g.Nodes
	g.Nodes = make(Nodes, 0, len(kept))

	for _, cur := range oldNodes {
		// A node may not have multiple parents
		if len(cur.In) > 1 {
			panic("TrimTree only works on trees")
		}

		// If a node should be kept, add it to the new list of nodes
		if _, ok := kept[cur]; ok {
			g.Nodes = append(g.Nodes, cur)
			continue
		}

		// If a node has no parents, then delete all of the in edges of its
		// children to make them each roots of their own trees.
		if len(cur.In) == 0 {
			for _, outEdge := range cur.Out {
				delete(outEdge.Dest.In, cur)
			}
			continue
		}

		// Get the parent. This works since at this point cur.In must contain only
		// one element.
		if len(cur.In) != 1 {
			panic("Get parent assertion failed. cur.In expected to be of length 1.")
		}
		var parent *Node
		for _, edge := range cur.In {
			parent = edge.Src
		}

		// Remember inline-ness before removing the parent->cur edge.
		parentEdgeInline := parent.Out[cur].Inline

		// Remove the edge from the parent to this node
		delete(parent.Out, cur)

		// Reconfigure every edge from the current node to now begin at the parent.
		for _, outEdge := range cur.Out {
			child := outEdge.Dest

			delete(child.In, cur)
			child.In[parent] = outEdge
			parent.Out[child] = outEdge

			outEdge.Src = parent
			outEdge.Residual = true
			// If the edge from the parent to the current node and the edge from the
			// current node to the child are both inline, then this resulting residual
			// edge should also be inline
			outEdge.Inline = parentEdgeInline && outEdge.Inline
		}
	}
	g.RemoveRedundantEdges()
}
+
+func joinLabels(s *profile.Sample) string {
+ if len(s.Label) == 0 {
+ return ""
+ }
+
+ var labels []string
+ for key, vals := range s.Label {
+ for _, v := range vals {
+ labels = append(labels, key+":"+v)
+ }
+ }
+ sort.Strings(labels)
+ return strings.Join(labels, `\n`)
+}
+
+// isNegative returns true if the node is considered as "negative" for the
+// purposes of drop_negative.
+func isNegative(n *Node) bool {
+ switch {
+ case n.Flat < 0:
+ return true
+ case n.Flat == 0 && n.Cum < 0:
+ return true
+ default:
+ return false
+ }
+}
+
// CreateNodes creates graph nodes for all locations in a profile. It
// returns set of all nodes, plus a mapping of each location to the
// set of corresponding nodes (one per location.Line). An entry in that
// per-location slice may be nil when the node was filtered out.
func CreateNodes(prof *profile.Profile, o *Options) (Nodes, map[uint64]Nodes) {
	locations := make(map[uint64]Nodes, len(prof.Location))
	nm := make(NodeMap, len(prof.Location))
	for _, l := range prof.Location {
		lines := l.Line
		if len(lines) == 0 {
			lines = []profile.Line{{}} // Create empty line to include location info.
		}
		nodes := make(Nodes, len(lines))
		for ln := range lines {
			nodes[ln] = nm.findOrInsertLine(l, lines[ln], o)
		}
		locations[l.ID] = nodes
	}
	return nm.nodes(), locations
}
+
+func (nm NodeMap) nodes() Nodes {
+ nodes := make(Nodes, 0, len(nm))
+ for _, n := range nm {
+ nodes = append(nodes, n)
+ }
+ return nodes
+}
+
+func (nm NodeMap) findOrInsertLine(l *profile.Location, li profile.Line, o *Options) *Node {
+ var objfile string
+ if m := l.Mapping; m != nil && m.File != "" {
+ objfile = m.File
+ }
+
+ if ni := nodeInfo(l, li, objfile, o); ni != nil {
+ return nm.FindOrInsertNode(*ni, o.KeptNodes)
+ }
+ return nil
+}
+
+func nodeInfo(l *profile.Location, line profile.Line, objfile string, o *Options) *NodeInfo {
+ if line.Function == nil {
+ return &NodeInfo{Address: l.Address, Objfile: objfile}
+ }
+ ni := &NodeInfo{
+ Address: l.Address,
+ Lineno: int(line.Line),
+ Name: line.Function.Name,
+ }
+ if fname := line.Function.Filename; fname != "" {
+ ni.File = filepath.Clean(fname)
+ }
+ if o.OrigFnNames {
+ ni.OrigName = line.Function.SystemName
+ }
+ if o.ObjNames || (ni.Name == "" && ni.OrigName == "") {
+ ni.Objfile = objfile
+ ni.StartLine = int(line.Function.StartLine)
+ }
+ return ni
+}
+
// tags bundles a list of tags with the requested sort mode: when flat
// is set, flat values dominate the ordering; otherwise cumulative
// values are compared first.
type tags struct {
	t    []*Tag
	flat bool
}

// sort.Interface implementation: descending absolute weight (cum first
// unless flat is set, then flat), with ties broken by name.
func (t tags) Len() int      { return len(t.t) }
func (t tags) Swap(i, j int) { t.t[i], t.t[j] = t.t[j], t.t[i] }
func (t tags) Less(i, j int) bool {
	if !t.flat {
		if t.t[i].Cum != t.t[j].Cum {
			return abs64(t.t[i].Cum) > abs64(t.t[j].Cum)
		}
	}
	if t.t[i].Flat != t.t[j].Flat {
		return abs64(t.t[i].Flat) > abs64(t.t[j].Flat)
	}
	return t.t[i].Name < t.t[j].Name
}
+
+// Sum adds the flat and cum values of a set of nodes.
+func (ns Nodes) Sum() (flat int64, cum int64) {
+ for _, n := range ns {
+ flat += n.Flat
+ cum += n.Cum
+ }
+ return
+}
+
// addSample accumulates one sample's weight (w) and divisor weight (dw)
// into the node's flat or cumulative counters, depending on flat. It
// also records the sample's string label set (labels, pre-joined by
// joinLabels) and numeric labels (numLabel, with optional units in
// numUnit) as tags on the node. format, when non-nil, renders a numeric
// label value into a tag name.
func (n *Node) addSample(dw, w int64, labels string, numLabel map[string][]int64, numUnit map[string][]string, format func(int64, string) string, flat bool) {
	// Update sample value
	if flat {
		n.FlatDiv += dw
		n.Flat += w
	} else {
		n.CumDiv += dw
		n.Cum += w
	}

	// Add string tags
	if labels != "" {
		t := n.LabelTags.findOrAddTag(labels, "", 0)
		if flat {
			t.FlatDiv += dw
			t.Flat += w
		} else {
			t.CumDiv += dw
			t.Cum += w
		}
	}

	// Numeric tags are grouped under the sample's string label set;
	// lazily create the TagMap for that group.
	numericTags := n.NumericTags[labels]
	if numericTags == nil {
		numericTags = TagMap{}
		n.NumericTags[labels] = numericTags
	}
	// Add numeric tags
	if format == nil {
		format = defaultLabelFormat
	}
	for k, nvals := range numLabel {
		units := numUnit[k]
		for i, v := range nvals {
			var t *Tag
			if len(units) > 0 {
				// With units available, the i-th value is paired with the
				// i-th unit; otherwise the label key doubles as the unit.
				t = numericTags.findOrAddTag(format(v, units[i]), units[i], v)
			} else {
				t = numericTags.findOrAddTag(format(v, k), k, v)
			}
			if flat {
				t.FlatDiv += dw
				t.Flat += w
			} else {
				t.CumDiv += dw
				t.Cum += w
			}
		}
	}
}
+
// defaultLabelFormat renders a numeric label value as a plain base-10
// integer; the key is intentionally ignored.
func defaultLabelFormat(v int64, key string) string {
	return strconv.FormatInt(v, 10)
}
+
+func (m TagMap) findOrAddTag(label, unit string, value int64) *Tag {
+ l := m[label]
+ if l == nil {
+ l = &Tag{
+ Name: label,
+ Unit: unit,
+ Value: value,
+ }
+ m[label] = l
+ }
+ return l
+}
+
+// String returns a text representation of a graph, for debugging purposes.
+func (g *Graph) String() string {
+ var s []string
+
+ nodeIndex := make(map[*Node]int, len(g.Nodes))
+
+ for i, n := range g.Nodes {
+ nodeIndex[n] = i + 1
+ }
+
+ for i, n := range g.Nodes {
+ name := n.Info.PrintableName()
+ var in, out []int
+
+ for _, from := range n.In {
+ in = append(in, nodeIndex[from.Src])
+ }
+ for _, to := range n.Out {
+ out = append(out, nodeIndex[to.Dest])
+ }
+ s = append(s, fmt.Sprintf("%d: %s[flat=%d cum=%d] %x -> %v ", i+1, name, n.Flat, n.Cum, in, out))
+ }
+ return strings.Join(s, "\n")
+}
+
+// DiscardLowFrequencyNodes returns a set of the nodes at or over a
+// specific cum value cutoff.
+func (g *Graph) DiscardLowFrequencyNodes(nodeCutoff int64) NodeSet {
+ return makeNodeSet(g.Nodes, nodeCutoff)
+}
+
+// DiscardLowFrequencyNodePtrs returns a NodePtrSet of nodes at or over a
+// specific cum value cutoff.
+func (g *Graph) DiscardLowFrequencyNodePtrs(nodeCutoff int64) NodePtrSet {
+ cutNodes := getNodesAboveCumCutoff(g.Nodes, nodeCutoff)
+ kept := make(NodePtrSet, len(cutNodes))
+ for _, n := range cutNodes {
+ kept[n] = true
+ }
+ return kept
+}
+
+func makeNodeSet(nodes Nodes, nodeCutoff int64) NodeSet {
+ cutNodes := getNodesAboveCumCutoff(nodes, nodeCutoff)
+ kept := make(NodeSet, len(cutNodes))
+ for _, n := range cutNodes {
+ kept[n.Info] = true
+ }
+ return kept
+}
+
+// getNodesAboveCumCutoff returns all the nodes which have a Cum value greater
+// than or equal to cutoff.
+func getNodesAboveCumCutoff(nodes Nodes, nodeCutoff int64) Nodes {
+ cutoffNodes := make(Nodes, 0, len(nodes))
+ for _, n := range nodes {
+ if abs64(n.Cum) < nodeCutoff {
+ continue
+ }
+ cutoffNodes = append(cutoffNodes, n)
+ }
+ return cutoffNodes
+}
+
+// TrimLowFrequencyTags removes tags that have less than
+// the specified weight.
+func (g *Graph) TrimLowFrequencyTags(tagCutoff int64) {
+ // Remove nodes with value <= total*nodeFraction
+ for _, n := range g.Nodes {
+ n.LabelTags = trimLowFreqTags(n.LabelTags, tagCutoff)
+ for s, nt := range n.NumericTags {
+ n.NumericTags[s] = trimLowFreqTags(nt, tagCutoff)
+ }
+ }
+}
+
+func trimLowFreqTags(tags TagMap, minValue int64) TagMap {
+ kept := TagMap{}
+ for s, t := range tags {
+ if abs64(t.Flat) >= minValue || abs64(t.Cum) >= minValue {
+ kept[s] = t
+ }
+ }
+ return kept
+}
+
+// TrimLowFrequencyEdges removes edges that have less than
+// the specified weight. Returns the number of edges removed
+func (g *Graph) TrimLowFrequencyEdges(edgeCutoff int64) int {
+ var droppedEdges int
+ for _, n := range g.Nodes {
+ for src, e := range n.In {
+ if abs64(e.Weight) < edgeCutoff {
+ delete(n.In, src)
+ delete(src.Out, n)
+ droppedEdges++
+ }
+ }
+ }
+ return droppedEdges
+}
+
+// SortNodes sorts the nodes in a graph based on a specific heuristic.
+func (g *Graph) SortNodes(cum bool, visualMode bool) {
+ // Sort nodes based on requested mode
+ switch {
+ case visualMode:
+ // Specialized sort to produce a more visually-interesting graph
+ g.Nodes.Sort(EntropyOrder)
+ case cum:
+ g.Nodes.Sort(CumNameOrder)
+ default:
+ g.Nodes.Sort(FlatNameOrder)
+ }
+}
+
+// SelectTopNodePtrs returns a set of the top maxNodes *Node in a graph.
+func (g *Graph) SelectTopNodePtrs(maxNodes int, visualMode bool) NodePtrSet {
+ set := make(NodePtrSet)
+ for _, node := range g.selectTopNodes(maxNodes, visualMode) {
+ set[node] = true
+ }
+ return set
+}
+
+// SelectTopNodes returns a set of the top maxNodes nodes in a graph.
+func (g *Graph) SelectTopNodes(maxNodes int, visualMode bool) NodeSet {
+ return makeNodeSet(g.selectTopNodes(maxNodes, visualMode), 0)
+}
+
+// selectTopNodes returns a slice of the top maxNodes nodes in a graph.
+func (g *Graph) selectTopNodes(maxNodes int, visualMode bool) Nodes {
+ if maxNodes > 0 {
+ if visualMode {
+ var count int
+ // If generating a visual graph, count tags as nodes. Update
+ // maxNodes to account for them.
+ for i, n := range g.Nodes {
+ tags := countTags(n)
+ if tags > maxNodelets {
+ tags = maxNodelets
+ }
+ if count += tags + 1; count >= maxNodes {
+ maxNodes = i + 1
+ break
+ }
+ }
+ }
+ }
+ if maxNodes > len(g.Nodes) {
+ maxNodes = len(g.Nodes)
+ }
+ return g.Nodes[:maxNodes]
+}
+
+// countTags counts the tags with flat count. This underestimates the
+// number of tags being displayed, but in practice is close enough.
+func countTags(n *Node) int {
+ count := 0
+ for _, e := range n.LabelTags {
+ if e.Flat != 0 {
+ count++
+ }
+ }
+ for _, t := range n.NumericTags {
+ for _, e := range t {
+ if e.Flat != 0 {
+ count++
+ }
+ }
+ }
+ return count
+}
+
// RemoveRedundantEdges removes residual edges if the destination can
// be reached through another path. This is done to simplify the graph
// while preserving connectivity.
func (g *Graph) RemoveRedundantEdges() {
	// Walk the nodes and outgoing edges in reverse order to prefer
	// removing edges with the lowest weight.
	for i := len(g.Nodes); i > 0; i-- {
		n := g.Nodes[i-1]
		// In.Sort() yields edges heaviest-first, so the reverse walk
		// below visits the lightest incoming edges first.
		in := n.In.Sort()
		for j := len(in); j > 0; j-- {
			e := in[j-1]
			if !e.Residual {
				// Do not remove edges heavier than a non-residual edge, to
				// avoid potential confusion.
				break
			}
			if isRedundantEdge(e) {
				// Another path from e.Src to e.Dest exists; drop e from
				// both endpoints' edge maps.
				delete(e.Src.Out, e.Dest)
				delete(e.Dest.In, e.Src)
			}
		}
	}
}
+
+// isRedundantEdge determines if there is a path that allows e.Src
+// to reach e.Dest after removing e.
+func isRedundantEdge(e *Edge) bool {
+ src, n := e.Src, e.Dest
+ seen := map[*Node]bool{n: true}
+ queue := Nodes{n}
+ for len(queue) > 0 {
+ n := queue[0]
+ queue = queue[1:]
+ for _, ie := range n.In {
+ if e == ie || seen[ie.Src] {
+ continue
+ }
+ if ie.Src == src {
+ return true
+ }
+ seen[ie.Src] = true
+ queue = append(queue, ie.Src)
+ }
+ }
+ return false
+}
+
// nodeSorter is a mechanism used to allow a report to be sorted
// in different ways: it pairs a node slice with a pluggable comparison
// function and adapts them to sort.Interface.
type nodeSorter struct {
	rs   Nodes
	less func(l, r *Node) bool
}

func (s nodeSorter) Len() int           { return len(s.rs) }
func (s nodeSorter) Swap(i, j int)      { s.rs[i], s.rs[j] = s.rs[j], s.rs[i] }
func (s nodeSorter) Less(i, j int) bool { return s.less(s.rs[i], s.rs[j]) }
+
// Sort reorders a slice of nodes based on the specified ordering
// criteria. The result is sorted in decreasing order for (absolute)
// numeric quantities, alphabetically for text, and increasing for
// addresses. Every ordering falls back to compareNodes so the final
// order is fully deterministic.
func (ns Nodes) Sort(o NodeOrder) error {
	var s nodeSorter

	switch o {
	case FlatNameOrder:
		s = nodeSorter{ns,
			func(l, r *Node) bool {
				if iv, jv := abs64(l.Flat), abs64(r.Flat); iv != jv {
					return iv > jv
				}
				if iv, jv := l.Info.PrintableName(), r.Info.PrintableName(); iv != jv {
					return iv < jv
				}
				if iv, jv := abs64(l.Cum), abs64(r.Cum); iv != jv {
					return iv > jv
				}
				return compareNodes(l, r)
			},
		}
	case FlatCumNameOrder:
		s = nodeSorter{ns,
			func(l, r *Node) bool {
				if iv, jv := abs64(l.Flat), abs64(r.Flat); iv != jv {
					return iv > jv
				}
				if iv, jv := abs64(l.Cum), abs64(r.Cum); iv != jv {
					return iv > jv
				}
				if iv, jv := l.Info.PrintableName(), r.Info.PrintableName(); iv != jv {
					return iv < jv
				}
				return compareNodes(l, r)
			},
		}
	case NameOrder:
		s = nodeSorter{ns,
			func(l, r *Node) bool {
				if iv, jv := l.Info.Name, r.Info.Name; iv != jv {
					return iv < jv
				}
				return compareNodes(l, r)
			},
		}
	case FileOrder:
		s = nodeSorter{ns,
			func(l, r *Node) bool {
				if iv, jv := l.Info.File, r.Info.File; iv != jv {
					return iv < jv
				}
				if iv, jv := l.Info.StartLine, r.Info.StartLine; iv != jv {
					return iv < jv
				}
				return compareNodes(l, r)
			},
		}
	case AddressOrder:
		s = nodeSorter{ns,
			func(l, r *Node) bool {
				if iv, jv := l.Info.Address, r.Info.Address; iv != jv {
					return iv < jv
				}
				return compareNodes(l, r)
			},
		}
	case CumNameOrder, EntropyOrder:
		// Hold scoring for score-based ordering. Both orders share the
		// same comparison; they differ only in how the score is computed.
		var score map[*Node]int64
		scoreOrder := func(l, r *Node) bool {
			if iv, jv := abs64(score[l]), abs64(score[r]); iv != jv {
				return iv > jv
			}
			if iv, jv := l.Info.PrintableName(), r.Info.PrintableName(); iv != jv {
				return iv < jv
			}
			if iv, jv := abs64(l.Flat), abs64(r.Flat); iv != jv {
				return iv > jv
			}
			return compareNodes(l, r)
		}

		switch o {
		case CumNameOrder:
			score = make(map[*Node]int64, len(ns))
			for _, n := range ns {
				score[n] = n.Cum
			}
			s = nodeSorter{ns, scoreOrder}
		case EntropyOrder:
			score = make(map[*Node]int64, len(ns))
			for _, n := range ns {
				score[n] = entropyScore(n)
			}
			s = nodeSorter{ns, scoreOrder}
		}
	default:
		return fmt.Errorf("report: unrecognized sort ordering: %d", o)
	}
	sort.Sort(s)
	return nil
}
+
+// compareNodes compares two nodes to provide a deterministic ordering
+// between them. Two nodes cannot have the same Node.Info value.
+func compareNodes(l, r *Node) bool {
+ return fmt.Sprint(l.Info) < fmt.Sprint(r.Info)
+}
+
+// entropyScore computes a score for a node representing how important
+// it is to include this node on a graph visualization. It is used to
+// sort the nodes and select which ones to display if we have more
+// nodes than desired in the graph. This number is computed by looking
+// at the flat and cum weights of the node and the incoming/outgoing
+// edges. The fundamental idea is to penalize nodes that have a simple
+// fallthrough from their incoming to the outgoing edge.
+func entropyScore(n *Node) int64 {
+ score := float64(0)
+
+ if len(n.In) == 0 {
+ score++ // Favor entry nodes
+ } else {
+ score += edgeEntropyScore(n, n.In, 0)
+ }
+
+ if len(n.Out) == 0 {
+ score++ // Favor leaf nodes
+ } else {
+ score += edgeEntropyScore(n, n.Out, n.Flat)
+ }
+
+ return int64(score*float64(n.Cum)) + n.Flat
+}
+
+// edgeEntropyScore computes the entropy value for a set of edges
+// coming in or out of a node. Entropy (as defined in information
+// theory) refers to the amount of information encoded by the set of
+// edges. A set of edges that have a more interesting distribution of
+// samples gets a higher score.
+func edgeEntropyScore(n *Node, edges EdgeMap, self int64) float64 {
+ score := float64(0)
+ total := self
+ for _, e := range edges {
+ if e.Weight > 0 {
+ total += abs64(e.Weight)
+ }
+ }
+ if total != 0 {
+ for _, e := range edges {
+ frac := float64(abs64(e.Weight)) / float64(total)
+ score += -frac * math.Log2(frac)
+ }
+ if self > 0 {
+ frac := float64(abs64(self)) / float64(total)
+ score += -frac * math.Log2(frac)
+ }
+ }
+ return score
+}
+
// NodeOrder sets the ordering for a Sort operation
type NodeOrder int

// Sorting options for node sort.
const (
	FlatNameOrder    NodeOrder = iota // decreasing |flat|, then name, then |cum|
	FlatCumNameOrder                  // decreasing |flat|, then |cum|, then name
	CumNameOrder                      // decreasing |cum|, then name, then |flat|
	NameOrder                         // alphabetical by Info.Name
	FileOrder                         // by file name, then start line
	AddressOrder                      // increasing address
	EntropyOrder                      // decreasing entropy score (visual mode)
)
+
+// Sort returns a slice of the edges in the map, in a consistent
+// order. The sort order is first based on the edge weight
+// (higher-to-lower) and then by the node names to avoid flakiness.
+func (e EdgeMap) Sort() []*Edge {
+ el := make(edgeList, 0, len(e))
+ for _, w := range e {
+ el = append(el, w)
+ }
+
+ sort.Sort(el)
+ return el
+}
+
+// Sum returns the total weight for a set of nodes.
+func (e EdgeMap) Sum() int64 {
+ var ret int64
+ for _, edge := range e {
+ ret += edge.Weight
+ }
+ return ret
+}
+
+type edgeList []*Edge
+
+func (el edgeList) Len() int {
+ return len(el)
+}
+
+func (el edgeList) Less(i, j int) bool {
+ if el[i].Weight != el[j].Weight {
+ return abs64(el[i].Weight) > abs64(el[j].Weight)
+ }
+
+ from1 := el[i].Src.Info.PrintableName()
+ from2 := el[j].Src.Info.PrintableName()
+ if from1 != from2 {
+ return from1 < from2
+ }
+
+ to1 := el[i].Dest.Info.PrintableName()
+ to2 := el[j].Dest.Info.PrintableName()
+
+ return to1 < to2
+}
+
+func (el edgeList) Swap(i, j int) {
+ el[i], el[j] = el[j], el[i]
+}
+
// abs64 returns the absolute value of i.
func abs64(i int64) int64 {
	if i >= 0 {
		return i
	}
	return -i
}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/measurement/measurement.go b/src/cmd/vendor/github.com/google/pprof/internal/measurement/measurement.go
new file mode 100644
index 0000000..d9644f9
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/measurement/measurement.go
@@ -0,0 +1,293 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package measurement export utility functions to manipulate/format performance profile sample values.
+package measurement
+
+import (
+ "fmt"
+ "math"
+ "strings"
+ "time"
+
+ "github.com/google/pprof/profile"
+)
+
// ScaleProfiles updates the units in a set of profiles to make them
// compatible. It scales the profiles to the smallest unit to preserve
// data.
func ScaleProfiles(profiles []*profile.Profile) error {
	if len(profiles) == 0 {
		return nil
	}
	// Find a common period type across the profiles that declare one.
	periodTypes := make([]*profile.ValueType, 0, len(profiles))
	for _, p := range profiles {
		if p.PeriodType != nil {
			periodTypes = append(periodTypes, p.PeriodType)
		}
	}
	periodType, err := CommonValueType(periodTypes)
	if err != nil {
		return fmt.Errorf("period type: %v", err)
	}

	// Identify common sample types
	numSampleTypes := len(profiles[0].SampleType)
	for _, p := range profiles[1:] {
		if numSampleTypes != len(p.SampleType) {
			return fmt.Errorf("inconsistent samples type count: %d != %d", numSampleTypes, len(p.SampleType))
		}
	}
	// For each sample-type column, compute the finest common type.
	sampleType := make([]*profile.ValueType, numSampleTypes)
	for i := 0; i < numSampleTypes; i++ {
		sampleTypes := make([]*profile.ValueType, len(profiles))
		for j, p := range profiles {
			sampleTypes[j] = p.SampleType[i]
		}
		sampleType[i], err = CommonValueType(sampleTypes)
		if err != nil {
			return fmt.Errorf("sample types: %v", err)
		}
	}

	// Rescale each profile's period and sample values into the common units.
	for _, p := range profiles {
		if p.PeriodType != nil && periodType != nil {
			period, _ := Scale(p.Period, p.PeriodType.Unit, periodType.Unit)
			p.Period, p.PeriodType.Unit = int64(period), periodType.Unit
		}
		ratios := make([]float64, len(p.SampleType))
		for i, st := range p.SampleType {
			if sampleType[i] == nil {
				// No common unit for this column; leave values untouched.
				ratios[i] = 1
				continue
			}
			ratios[i], _ = Scale(1, st.Unit, sampleType[i].Unit)
			p.SampleType[i].Unit = sampleType[i].Unit
		}
		if err := p.ScaleN(ratios); err != nil {
			return fmt.Errorf("scale: %v", err)
		}
	}
	return nil
}
+
+// CommonValueType returns the finest type from a set of compatible
+// types.
+func CommonValueType(ts []*profile.ValueType) (*profile.ValueType, error) {
+ if len(ts) <= 1 {
+ return nil, nil
+ }
+ minType := ts[0]
+ for _, t := range ts[1:] {
+ if !compatibleValueTypes(minType, t) {
+ return nil, fmt.Errorf("incompatible types: %v %v", *minType, *t)
+ }
+ if ratio, _ := Scale(1, t.Unit, minType.Unit); ratio < 1 {
+ minType = t
+ }
+ }
+ rcopy := *minType
+ return &rcopy, nil
+}
+
+func compatibleValueTypes(v1, v2 *profile.ValueType) bool {
+ if v1 == nil || v2 == nil {
+ return true // No grounds to disqualify.
+ }
+ // Remove trailing 's' to permit minor mismatches.
+ if t1, t2 := strings.TrimSuffix(v1.Type, "s"), strings.TrimSuffix(v2.Type, "s"); t1 != t2 {
+ return false
+ }
+
+ if v1.Unit == v2.Unit {
+ return true
+ }
+ for _, ut := range unitTypes {
+ if ut.sniffUnit(v1.Unit) != nil && ut.sniffUnit(v2.Unit) != nil {
+ return true
+ }
+ }
+ return false
+}
+
+// Scale a measurement from a unit to a different unit and returns
+// the scaled value and the target unit. The returned target unit
+// will be empty if uninteresting (could be skipped).
+func Scale(value int64, fromUnit, toUnit string) (float64, string) {
+ // Avoid infinite recursion on overflow.
+ if value < 0 && -value > 0 {
+ v, u := Scale(-value, fromUnit, toUnit)
+ return -v, u
+ }
+ for _, ut := range unitTypes {
+ if v, u, ok := ut.convertUnit(value, fromUnit, toUnit); ok {
+ return v, u
+ }
+ }
+ // Skip non-interesting units.
+ switch toUnit {
+ case "count", "sample", "unit", "minimum", "auto":
+ return float64(value), ""
+ default:
+ return float64(value), toUnit
+ }
+}
+
+// Label returns the label used to describe a certain measurement.
+func Label(value int64, unit string) string {
+ return ScaledLabel(value, unit, "auto")
+}
+
+// ScaledLabel scales the passed-in measurement (if necessary) and
+// returns the label used to describe a float measurement.
+func ScaledLabel(value int64, fromUnit, toUnit string) string {
+ v, u := Scale(value, fromUnit, toUnit)
+ sv := strings.TrimSuffix(fmt.Sprintf("%.2f", v), ".00")
+ if sv == "0" || sv == "-0" {
+ return "0"
+ }
+ return sv + u
+}
+
// Percentage computes the percentage of total of a value, and encodes
// it as a string with at least two digits of precision. The ratio is
// taken as an absolute value; anything within 0.05 of 100 renders as
// " 100%".
func Percentage(value, total int64) string {
	var ratio float64
	if total != 0 {
		ratio = math.Abs(float64(value)/float64(total)) * 100
	}
	if ratio >= 99.95 && ratio <= 100.05 {
		return " 100%"
	}
	if ratio >= 1.0 {
		return fmt.Sprintf("%5.2f%%", ratio)
	}
	return fmt.Sprintf("%5.2g%%", ratio)
}
+
// unit includes a list of aliases representing a specific unit and a
// factor by which one can multiply a value in the specified unit to get
// the value in terms of the base unit.
type unit struct {
	canonicalName string   // name used when printing this unit
	aliases       []string // lowercase, singular spellings accepted on input
	factor        float64  // multiplier to convert a value to the base unit
}

// unitType includes a list of units that are within the same category
// (i.e. memory or time units) and a default unit to use for this type
// of unit.
type unitType struct {
	defaultUnit unit
	units       []unit
}
+
+// findByAlias returns the unit associated with the specified alias. It returns
+// nil if the unit with such alias is not found.
+func (ut unitType) findByAlias(alias string) *unit {
+ for _, u := range ut.units {
+ for _, a := range u.aliases {
+ if alias == a {
+ return &u
+ }
+ }
+ }
+ return nil
+}
+
+// sniffUnit simpifies the input alias and returns the unit associated with the
+// specified alias. It returns nil if the unit with such alias is not found.
+func (ut unitType) sniffUnit(unit string) *unit {
+ unit = strings.ToLower(unit)
+ if len(unit) > 2 {
+ unit = strings.TrimSuffix(unit, "s")
+ }
+ return ut.findByAlias(unit)
+}
+
+// autoScale takes in the value with units of the base unit and returns
+// that value scaled to a reasonable unit if a reasonable unit is
+// found.
+func (ut unitType) autoScale(value float64) (float64, string, bool) {
+ var f float64
+ var unit string
+ for _, u := range ut.units {
+ if u.factor >= f && (value/u.factor) >= 1.0 {
+ f = u.factor
+ unit = u.canonicalName
+ }
+ }
+ if f == 0 {
+ return 0, "", false
+ }
+ return value / f, unit, true
+}
+
// convertUnit converts a value from the fromUnit to the toUnit, autoscaling
// the value if the toUnit is "minimum" or "auto". If the fromUnit is not
// included in the unitType, then a false boolean will be returned. If the
// toUnit is not in the unitType, the value will be returned in terms of the
// default unitType.
func (ut unitType) convertUnit(value int64, fromUnitStr, toUnitStr string) (float64, string, bool) {
	fromUnit := ut.sniffUnit(fromUnitStr)
	if fromUnit == nil {
		// This family does not recognize the source unit; let the caller
		// try another family.
		return 0, "", false
	}
	// v is the value expressed in the family's base unit.
	v := float64(value) * fromUnit.factor
	if toUnitStr == "minimum" || toUnitStr == "auto" {
		if v, u, ok := ut.autoScale(v); ok {
			return v, u, true
		}
		// Nothing scaled above 1; fall back to the family default.
		return v / ut.defaultUnit.factor, ut.defaultUnit.canonicalName, true
	}
	toUnit := ut.sniffUnit(toUnitStr)
	if toUnit == nil {
		// Unknown target unit; report in the family default instead.
		return v / ut.defaultUnit.factor, ut.defaultUnit.canonicalName, true
	}
	return v / toUnit.factor, toUnit.canonicalName, true
}
+
// unitTypes lists the known unit families: binary-multiple memory
// sizes, time durations, and GCU (compute-unit) quantities. Factors are
// expressed relative to each family's base unit (B, ns, and GCU).
var unitTypes = []unitType{{
	// Memory sizes, in powers of 1024.
	units: []unit{
		{"B", []string{"b", "byte"}, 1},
		{"kB", []string{"kb", "kbyte", "kilobyte"}, float64(1 << 10)},
		{"MB", []string{"mb", "mbyte", "megabyte"}, float64(1 << 20)},
		{"GB", []string{"gb", "gbyte", "gigabyte"}, float64(1 << 30)},
		{"TB", []string{"tb", "tbyte", "terabyte"}, float64(1 << 40)},
		{"PB", []string{"pb", "pbyte", "petabyte"}, float64(1 << 50)},
	},
	defaultUnit: unit{"B", []string{"b", "byte"}, 1},
}, {
	// Time durations; factors are time.Duration values in nanoseconds.
	units: []unit{
		{"ns", []string{"ns", "nanosecond"}, float64(time.Nanosecond)},
		{"us", []string{"μs", "us", "microsecond"}, float64(time.Microsecond)},
		{"ms", []string{"ms", "millisecond"}, float64(time.Millisecond)},
		{"s", []string{"s", "sec", "second"}, float64(time.Second)},
		{"hrs", []string{"hour", "hr"}, float64(time.Hour)},
	},
	defaultUnit: unit{"s", []string{}, float64(time.Second)},
}, {
	// GCU quantities in SI decimal multiples.
	units: []unit{
		{"n*GCU", []string{"nanogcu"}, 1e-9},
		{"u*GCU", []string{"microgcu"}, 1e-6},
		{"m*GCU", []string{"milligcu"}, 1e-3},
		{"GCU", []string{"gcu"}, 1},
		{"k*GCU", []string{"kilogcu"}, 1e3},
		{"M*GCU", []string{"megagcu"}, 1e6},
		{"G*GCU", []string{"gigagcu"}, 1e9},
		{"T*GCU", []string{"teragcu"}, 1e12},
		{"P*GCU", []string{"petagcu"}, 1e15},
	},
	defaultUnit: unit{"GCU", []string{}, 1.0},
}}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/plugin/plugin.go b/src/cmd/vendor/github.com/google/pprof/internal/plugin/plugin.go
new file mode 100644
index 0000000..98eb1dd
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/plugin/plugin.go
@@ -0,0 +1,216 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package plugin defines the plugin implementations that the main pprof driver requires.
+package plugin
+
+import (
+ "io"
+ "net/http"
+ "regexp"
+ "time"
+
+ "github.com/google/pprof/profile"
+)
+
// Options groups all the optional plugins into pprof.
type Options struct {
	Writer  Writer     // output sink (see Writer)
	Flagset FlagSet    // command-line flag handling (see FlagSet)
	Fetch   Fetcher    // profile retrieval (see Fetcher)
	Sym     Symbolizer // symbolization (see Symbolizer)
	Obj     ObjTool    // object-file inspection (see ObjTool)
	UI      UI         // user interaction (see UI)

	// HTTPServer is a function that should block serving http requests,
	// including the handlers specified in args. If non-nil, pprof will
	// invoke this function if necessary to provide a web interface.
	//
	// If HTTPServer is nil, pprof will use its own internal HTTP server.
	//
	// A common use for a custom HTTPServer is to provide custom
	// authentication checks.
	HTTPServer    func(args *HTTPServerArgs) error
	HTTPTransport http.RoundTripper
}

// Writer provides a mechanism to write data under a certain name,
// typically a filename.
type Writer interface {
	Open(name string) (io.WriteCloser, error)
}
+
// A FlagSet creates and parses command-line flags.
// It is similar to the standard flag.FlagSet.
type FlagSet interface {
	// Bool, Int, Float64, and String define new flags,
	// like the functions of the same name in package flag.
	// Each returns a pointer to the flag's value.
	Bool(name string, def bool, usage string) *bool
	Int(name string, def int, usage string) *int
	Float64(name string, def float64, usage string) *float64
	String(name string, def string, usage string) *string

	// StringList is similar to String but allows multiple values for a
	// single flag.
	StringList(name string, def string, usage string) *[]*string

	// ExtraUsage returns any additional text that should be printed after the
	// standard usage message. The extra usage message returned includes all text
	// added with AddExtraUsage().
	// The typical use of ExtraUsage is to show any custom flags defined by the
	// specific pprof plugins being used.
	ExtraUsage() string

	// AddExtraUsage appends additional text to the end of the extra usage message.
	AddExtraUsage(eu string)

	// Parse initializes the flags with their values for this run
	// and returns the non-flag command line arguments.
	// If an unknown flag is encountered or there are no arguments,
	// Parse should call usage and return nil.
	Parse(usage func()) []string
}
+
// A Fetcher reads and returns the profile named by src. src can be a
// local file path or a URL. duration and timeout are units specified
// by the end user, or 0 by default. duration refers to the length of
// the profile collection, if applicable, and timeout is the amount of
// time to wait for a profile before returning an error. Returns the
// fetched profile, the URL of the actual source of the profile, or an
// error.
type Fetcher interface {
	Fetch(src string, duration, timeout time.Duration) (*profile.Profile, string, error)
}

// A Symbolizer introduces symbol information into a profile, using the
// mapping sources in srcs; mode selects the symbolization behavior.
type Symbolizer interface {
	Symbolize(mode string, srcs MappingSources, prof *profile.Profile) error
}
+
// MappingSources map each profile.Mapping to the source of the profile.
// The key is either Mapping.File or Mapping.BuildId.
type MappingSources map[string][]struct {
	Source string // URL of the source the mapping was collected from
	Start  uint64 // delta applied to addresses from this source (to represent Merge adjustments)
}

// An ObjTool inspects shared libraries and executable files.
type ObjTool interface {
	// Open opens the named object file. If the object is a shared
	// library, start/limit/offset are the addresses where it is mapped
	// into memory in the address space being inspected. If the object
	// is a linux kernel, relocationSymbol is the name of the symbol
	// corresponding to the start address.
	Open(file string, start, limit, offset uint64, relocationSymbol string) (ObjFile, error)

	// Disasm disassembles the named object file, starting at
	// the start address and stopping at (before) the end address.
	// intelSyntax selects Intel assembly syntax over the default.
	Disasm(file string, start, end uint64, intelSyntax bool) ([]Inst, error)
}

// An Inst is a single instruction in an assembly listing.
type Inst struct {
	Addr     uint64 // virtual address of instruction
	Text     string // instruction text
	Function string // function name
	File     string // source file
	Line     int    // source line
}
+
// An ObjFile is a single object file: a shared library or executable.
type ObjFile interface {
	// Name returns the underlying file name, if available.
	Name() string

	// ObjAddr returns the objdump (linker) address corresponding to a runtime
	// address, and an error.
	ObjAddr(addr uint64) (uint64, error)

	// BuildID returns the GNU build ID of the file, or an empty string.
	BuildID() string

	// SourceLine reports the source line information for a given
	// address in the file. Due to inlining, the source line information
	// is in general a list of positions representing a call stack,
	// with the leaf function first.
	SourceLine(addr uint64) ([]Frame, error)

	// Symbols returns a list of symbols in the object file.
	// If r is not nil, Symbols restricts the list to symbols
	// with names matching the regular expression.
	// If addr is not zero, Symbols restricts the list to symbols
	// containing that address.
	Symbols(r *regexp.Regexp, addr uint64) ([]*Sym, error)

	// Close closes the file, releasing associated resources.
	Close() error
}

// A Frame describes a single line in a source file.
type Frame struct {
	Func string // name of function
	File string // source file name
	Line int    // line in file
}

// A Sym describes a single symbol in an object file.
type Sym struct {
	Name  []string // names of symbol (many if symbol was dedup'ed)
	File  string   // object file containing symbol
	Start uint64   // start virtual address
	End   uint64   // virtual address of last byte in sym (Start+size-1)
}
+
// A UI manages user interactions.
type UI interface {
	// ReadLine returns a line of text (a command) read from the user.
	// prompt is printed before reading the command.
	ReadLine(prompt string) (string, error)

	// Print shows a message to the user.
	// It formats the text as fmt.Print would and adds a final \n if not already present.
	// For line-based UI, Print writes to standard error.
	// (Standard output is reserved for report data.)
	Print(...interface{})

	// PrintErr shows an error message to the user.
	// It formats the text as fmt.Print would and adds a final \n if not already present.
	// For line-based UI, PrintErr writes to standard error.
	PrintErr(...interface{})

	// IsTerminal returns whether the UI is known to be tied to an
	// interactive terminal (as opposed to being redirected to a file).
	IsTerminal() bool

	// WantBrowser indicates whether a browser should be opened with the -http option.
	WantBrowser() bool

	// SetAutoComplete instructs the UI to call complete(cmd) to obtain
	// the auto-completion of cmd, if the UI supports auto-completion at all.
	SetAutoComplete(complete func(string) string)
}

// HTTPServerArgs contains arguments needed by an HTTP server that
// is exporting a pprof web interface.
type HTTPServerArgs struct {
	// Hostport contains the http server address (derived from flags).
	Hostport string

	Host string // Host portion of Hostport
	Port int    // Port portion of Hostport

	// Handlers maps from URL paths to the handler to invoke to
	// serve that path.
	Handlers map[string]http.Handler
}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/report/package.go b/src/cmd/vendor/github.com/google/pprof/internal/report/package.go
new file mode 100644
index 0000000..0f6dcf5
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/report/package.go
@@ -0,0 +1,17 @@
+package report
+
+import "regexp"
+
// pkgRE extracts the package part of a symbol name. It looks for the first
// "." or "::" that occurs after the last "/". (Searching after the last /
// allows us to correctly handle names that look like "some.url.com/foo.bar".)
// Note: \w already matches digits and underscore, so a plain \w+ suffices
// for the package's final path component.
var pkgRE = regexp.MustCompile(`^((.*/)?\w+)(\.|::)([^/]*)$`)

// packageName returns the package name of the named symbol, or "" if not found.
func packageName(name string) string {
	m := pkgRE.FindStringSubmatch(name)
	if m == nil {
		return ""
	}
	return m[1]
}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/report/report.go b/src/cmd/vendor/github.com/google/pprof/internal/report/report.go
new file mode 100644
index 0000000..f73e49a
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/report/report.go
@@ -0,0 +1,1334 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package report summarizes a performance profile into a
+// human-readable report.
+package report
+
+import (
+ "fmt"
+ "io"
+ "path/filepath"
+ "regexp"
+ "sort"
+ "strconv"
+ "strings"
+ "text/tabwriter"
+ "time"
+
+ "github.com/google/pprof/internal/graph"
+ "github.com/google/pprof/internal/measurement"
+ "github.com/google/pprof/internal/plugin"
+ "github.com/google/pprof/profile"
+)
+
+// Output formats.
+const (
+ Callgrind = iota
+ Comments
+ Dis
+ Dot
+ List
+ Proto
+ Raw
+ Tags
+ Text
+ TopProto
+ Traces
+ Tree
+ WebList
+)
+
+// Options are the formatting and filtering options used to generate a
+// profile.
+type Options struct {
+ OutputFormat int
+
+ CumSort bool
+ CallTree bool
+ DropNegative bool
+ CompactLabels bool
+ Ratio float64
+ Title string
+ ProfileLabels []string
+ ActiveFilters []string
+ NumLabelUnits map[string]string
+
+ NodeCount int
+ NodeFraction float64
+ EdgeFraction float64
+
+ SampleValue func(s []int64) int64
+ SampleMeanDivisor func(s []int64) int64
+ SampleType string
+ SampleUnit string // Unit for the sample data from the profile.
+
+ OutputUnit string // Units for data formatting in report.
+
+ Symbol *regexp.Regexp // Symbols to include on disassembly report.
+ SourcePath string // Search path for source files.
+ TrimPath string // Paths to trim from source file paths.
+
+ IntelSyntax bool // Whether or not to print assembly in Intel syntax.
+}
+
+// Generate generates a report as directed by the Report.
+func Generate(w io.Writer, rpt *Report, obj plugin.ObjTool) error {
+ o := rpt.options
+
+ switch o.OutputFormat {
+ case Comments:
+ return printComments(w, rpt)
+ case Dot:
+ return printDOT(w, rpt)
+ case Tree:
+ return printTree(w, rpt)
+ case Text:
+ return printText(w, rpt)
+ case Traces:
+ return printTraces(w, rpt)
+ case Raw:
+ fmt.Fprint(w, rpt.prof.String())
+ return nil
+ case Tags:
+ return printTags(w, rpt)
+ case Proto:
+ return printProto(w, rpt)
+ case TopProto:
+ return printTopProto(w, rpt)
+ case Dis:
+ return printAssembly(w, rpt, obj)
+ case List:
+ return printSource(w, rpt)
+ case WebList:
+ return printWebSource(w, rpt, obj)
+ case Callgrind:
+ return printCallgrind(w, rpt)
+ }
+ return fmt.Errorf("unexpected output format")
+}
+
// newTrimmedGraph creates a graph for this report, trimmed according
// to the report options. It returns the trimmed graph g, the node count
// after cutoff-based filtering but before the node-count limit was
// applied (origCount), and the number of nodes and edges dropped.
func (rpt *Report) newTrimmedGraph() (g *graph.Graph, origCount, droppedNodes, droppedEdges int) {
	o := rpt.options

	// Build a graph and refine it. On each refinement step we must rebuild the graph from the samples,
	// as the graph itself doesn't contain enough information to preserve full precision.
	visualMode := o.OutputFormat == Dot
	cumSort := o.CumSort

	// The call_tree option is only honored when generating visual representations of the callgraph.
	callTree := o.CallTree && (o.OutputFormat == Dot || o.OutputFormat == Callgrind)

	// First step: Build complete graph to identify low frequency nodes, based on their cum weight.
	g = rpt.newGraph(nil)
	totalValue, _ := g.Nodes.Sum()
	// Cutoffs are fractions of the total absolute sample value.
	nodeCutoff := abs64(int64(float64(totalValue) * o.NodeFraction))
	edgeCutoff := abs64(int64(float64(totalValue) * o.EdgeFraction))

	// Filter out nodes with cum value below nodeCutoff.
	if nodeCutoff > 0 {
		if callTree {
			// Call-tree mode trims the existing graph in place rather
			// than rebuilding it from the kept node set.
			if nodesKept := g.DiscardLowFrequencyNodePtrs(nodeCutoff); len(g.Nodes) != len(nodesKept) {
				droppedNodes = len(g.Nodes) - len(nodesKept)
				g.TrimTree(nodesKept)
			}
		} else {
			if nodesKept := g.DiscardLowFrequencyNodes(nodeCutoff); len(g.Nodes) != len(nodesKept) {
				droppedNodes = len(g.Nodes) - len(nodesKept)
				g = rpt.newGraph(nodesKept)
			}
		}
	}
	origCount = len(g.Nodes)

	// Second step: Limit the total number of nodes. Apply specialized heuristics to improve
	// visualization when generating dot output.
	g.SortNodes(cumSort, visualMode)
	if nodeCount := o.NodeCount; nodeCount > 0 {
		// Remove low frequency tags and edges as they affect selection.
		g.TrimLowFrequencyTags(nodeCutoff)
		g.TrimLowFrequencyEdges(edgeCutoff)
		if callTree {
			if nodesKept := g.SelectTopNodePtrs(nodeCount, visualMode); len(g.Nodes) != len(nodesKept) {
				g.TrimTree(nodesKept)
				g.SortNodes(cumSort, visualMode)
			}
		} else {
			if nodesKept := g.SelectTopNodes(nodeCount, visualMode); len(g.Nodes) != len(nodesKept) {
				g = rpt.newGraph(nodesKept)
				g.SortNodes(cumSort, visualMode)
			}
		}
	}

	// Final step: Filter out low frequency tags and edges, and remove redundant edges that clutter
	// the graph.
	g.TrimLowFrequencyTags(nodeCutoff)
	droppedEdges = g.TrimLowFrequencyEdges(edgeCutoff)
	if visualMode {
		g.RemoveRedundantEdges()
	}
	return
}
+
// selectOutputUnit resolves the special OutputUnit value "minimum" by
// choosing a concrete unit appropriate for the smallest non-zero node
// value in g. For any other OutputUnit value, or an empty graph, it is
// a no-op.
func (rpt *Report) selectOutputUnit(g *graph.Graph) {
	o := rpt.options

	// Select best unit for profile output.
	// Find the appropriate units for the smallest non-zero sample
	if o.OutputUnit != "minimum" || len(g.Nodes) == 0 {
		return
	}
	var minValue int64

	// Smallest non-zero per-node value; flat preferred, cum as fallback.
	for _, n := range g.Nodes {
		nodeMin := abs64(n.FlatValue())
		if nodeMin == 0 {
			nodeMin = abs64(n.CumValue())
		}
		if nodeMin > 0 && (minValue == 0 || nodeMin < minValue) {
			minValue = nodeMin
		}
	}
	maxValue := rpt.total
	if minValue == 0 {
		minValue = maxValue
	}

	// Apply the sample ratio before choosing units, since scaling can
	// change which unit is appropriate.
	if r := o.Ratio; r > 0 && r != 1 {
		minValue = int64(float64(minValue) * r)
		maxValue = int64(float64(maxValue) * r)
	}

	_, minUnit := measurement.Scale(minValue, o.SampleUnit, "minimum")
	_, maxUnit := measurement.Scale(maxValue, o.SampleUnit, "minimum")

	unit := minUnit
	if minUnit != maxUnit && minValue*100 < maxValue && o.OutputFormat != Callgrind {
		// Minimum and maximum values have different units. Scale
		// minimum by 100 to use larger units, allowing minimum value to
		// be scaled down to 0.01, except for callgrind reports since
		// they can only represent integer values.
		_, unit = measurement.Scale(100*minValue, o.SampleUnit, "minimum")
	}

	if unit != "" {
		o.OutputUnit = unit
	} else {
		o.OutputUnit = o.SampleUnit
	}
}
+
// newGraph creates a new graph for this report. If nodes is non-nil,
// only nodes whose info matches are included. Otherwise, all nodes
// are included, without trimming.
//
// NOTE: this rewrites rpt.prof in place (function filenames and sample
// numeric labels), so the profile is permanently altered for this report.
func (rpt *Report) newGraph(nodes graph.NodeSet) *graph.Graph {
	o := rpt.options

	// Clean up file paths using heuristics.
	prof := rpt.prof
	for _, f := range prof.Function {
		f.Filename = trimPath(f.Filename, o.TrimPath, o.SourcePath)
	}
	// Removes all numeric tags except for the bytes tag prior
	// to making graph.
	// TODO: modify to select first numeric tag if no bytes tag
	for _, s := range prof.Sample {
		numLabels := make(map[string][]int64, len(s.NumLabel))
		numUnits := make(map[string][]string, len(s.NumLabel))
		for k, vs := range s.NumLabel {
			if k == "bytes" {
				// Keep the bytes values, attaching the configured unit
				// to each value.
				unit := o.NumLabelUnits[k]
				numValues := make([]int64, len(vs))
				numUnit := make([]string, len(vs))
				for i, v := range vs {
					numValues[i] = v
					numUnit[i] = unit
				}
				numLabels[k] = append(numLabels[k], numValues...)
				numUnits[k] = append(numUnits[k], numUnit...)
			}
		}
		s.NumLabel = numLabels
		s.NumUnit = numUnits
	}

	// Remove label marking samples from the base profiles, so it does not appear
	// as a nodelet in the graph view.
	prof.RemoveLabel("pprof::base")

	formatTag := func(v int64, key string) string {
		return measurement.ScaledLabel(v, key, o.OutputUnit)
	}

	gopt := &graph.Options{
		SampleValue:       o.SampleValue,
		SampleMeanDivisor: o.SampleMeanDivisor,
		FormatTag:         formatTag,
		CallTree:          o.CallTree && (o.OutputFormat == Dot || o.OutputFormat == Callgrind),
		DropNegative:      o.DropNegative,
		KeptNodes:         nodes,
	}

	// Only keep binary names for disassembly-based reports, otherwise
	// remove it to allow merging of functions across binaries.
	switch o.OutputFormat {
	case Raw, List, WebList, Dis, Callgrind:
		gopt.ObjNames = true
	}

	return graph.New(rpt.prof, gopt)
}
+
+// printProto writes the incoming proto via thw writer w.
+// If the divide_by option has been specified, samples are scaled appropriately.
+func printProto(w io.Writer, rpt *Report) error {
+ p, o := rpt.prof, rpt.options
+
+ // Apply the sample ratio to all samples before saving the profile.
+ if r := o.Ratio; r > 0 && r != 1 {
+ for _, sample := range p.Sample {
+ for i, v := range sample.Value {
+ sample.Value[i] = int64(float64(v) * r)
+ }
+ }
+ }
+ return p.Write(w)
+}
+
// printTopProto writes a list of the hottest routines in a profile as a profile.proto.
func printTopProto(w io.Writer, rpt *Report) error {
	p := rpt.prof
	o := rpt.options
	g, _, _, _ := rpt.newTrimmedGraph()
	rpt.selectOutputUnit(g)

	out := profile.Profile{
		SampleType: []*profile.ValueType{
			{Type: "cum", Unit: o.OutputUnit},
			{Type: "flat", Unit: o.OutputUnit},
		},
		TimeNanos:     p.TimeNanos,
		DurationNanos: p.DurationNanos,
		PeriodType:    p.PeriodType,
		Period:        p.Period,
	}
	functionMap := make(functionMap)
	// Each graph node becomes one single-location sample in the output.
	for i, n := range g.Nodes {
		f, added := functionMap.findOrAdd(n.Info)
		if added {
			out.Function = append(out.Function, f)
		}
		flat, cum := n.FlatValue(), n.CumValue()
		l := &profile.Location{
			ID:      uint64(i + 1),
			Address: n.Info.Address,
			Line: []profile.Line{
				{
					Line:     int64(n.Info.Lineno),
					Function: f,
				},
			},
		}

		// Values are ordered {cum, flat} to match SampleType above.
		fv, _ := measurement.Scale(flat, o.SampleUnit, o.OutputUnit)
		cv, _ := measurement.Scale(cum, o.SampleUnit, o.OutputUnit)
		s := &profile.Sample{
			Location: []*profile.Location{l},
			Value:    []int64{int64(cv), int64(fv)},
		}
		out.Location = append(out.Location, l)
		out.Sample = append(out.Sample, s)
	}

	return out.Write(w)
}
+
+type functionMap map[string]*profile.Function
+
+// findOrAdd takes a node representing a function, adds the function
+// represented by the node to the map if the function is not already present,
+// and returns the function the node represents. This also returns a boolean,
+// which is true if the function was added and false otherwise.
+func (fm functionMap) findOrAdd(ni graph.NodeInfo) (*profile.Function, bool) {
+ fName := fmt.Sprintf("%q%q%q%d", ni.Name, ni.OrigName, ni.File, ni.StartLine)
+
+ if f := fm[fName]; f != nil {
+ return f, false
+ }
+
+ f := &profile.Function{
+ ID: uint64(len(fm) + 1),
+ Name: ni.Name,
+ SystemName: ni.OrigName,
+ Filename: ni.File,
+ StartLine: int64(ni.StartLine),
+ }
+ fm[fName] = f
+ return f, true
+}
+
+// printAssembly prints an annotated assembly listing.
+func printAssembly(w io.Writer, rpt *Report, obj plugin.ObjTool) error {
+ return PrintAssembly(w, rpt, obj, -1)
+}
+
// PrintAssembly prints annotated disassembly of rpt to w. If maxFuncs is
// negative, all matching functions are printed sorted by name; otherwise
// only the maxFuncs functions with the largest flat sample sums are
// printed, hottest first.
func PrintAssembly(w io.Writer, rpt *Report, obj plugin.ObjTool, maxFuncs int) error {
	o := rpt.options
	prof := rpt.prof

	g := rpt.newGraph(nil)

	// If the regexp source can be parsed as an address, also match
	// functions that land on that address.
	var address *uint64
	if hex, err := strconv.ParseUint(o.Symbol.String(), 0, 64); err == nil {
		address = &hex
	}

	fmt.Fprintln(w, "Total:", rpt.formatValue(rpt.total))
	symbols := symbolsFromBinaries(prof, g, o.Symbol, address, obj)
	symNodes := nodesPerSymbol(g.Nodes, symbols)

	// Sort for printing.
	var syms []*objSymbol
	for s := range symNodes {
		syms = append(syms, s)
	}
	// Tie-break equal names by start address for a stable order.
	byName := func(a, b *objSymbol) bool {
		if na, nb := a.sym.Name[0], b.sym.Name[0]; na != nb {
			return na < nb
		}
		return a.sym.Start < b.sym.Start
	}
	if maxFuncs < 0 {
		sort.Sort(orderSyms{syms, byName})
	} else {
		// When limiting output, show the functions with the largest
		// flat sample sums first.
		byFlatSum := func(a, b *objSymbol) bool {
			suma, _ := symNodes[a].Sum()
			sumb, _ := symNodes[b].Sum()
			if suma != sumb {
				return suma > sumb
			}
			return byName(a, b)
		}
		sort.Sort(orderSyms{syms, byFlatSum})
		if len(syms) > maxFuncs {
			syms = syms[:maxFuncs]
		}
	}

	if len(syms) == 0 {
		// The symbol regexp case
		if address == nil {
			return fmt.Errorf("no matches found for regexp %s", o.Symbol)
		}

		// The address case
		if len(symbols) == 0 {
			return fmt.Errorf("no matches found for address 0x%x", *address)
		}
		return fmt.Errorf("address 0x%x found in binary, but the corresponding symbols do not have samples in the profile", *address)
	}

	// Correlate the symbols from the binary with the profile samples.
	for _, s := range syms {
		sns := symNodes[s]

		// Gather samples for this symbol.
		flatSum, cumSum := sns.Sum()

		// Get the function assembly.
		insts, err := obj.Disasm(s.sym.File, s.sym.Start, s.sym.End, o.IntelSyntax)
		if err != nil {
			return err
		}

		ns := annotateAssembly(insts, sns, s.file)

		fmt.Fprintf(w, "ROUTINE ======================== %s\n", s.sym.Name[0])
		for _, name := range s.sym.Name[1:] {
			fmt.Fprintf(w, " AKA ======================== %s\n", name)
		}
		fmt.Fprintf(w, "%10s %10s (flat, cum) %s of Total\n",
			rpt.formatValue(flatSum), rpt.formatValue(cumSum),
			measurement.Percentage(cumSum, rpt.total))

		function, file, line := "", "", 0
		for _, n := range ns {
			locStr := ""
			// Skip loc information if it hasn't changed from previous instruction.
			if n.function != function || n.file != file || n.line != line {
				function, file, line = n.function, n.file, n.line
				if n.function != "" {
					locStr = n.function + " "
				}
				if n.file != "" {
					locStr += n.file
					if n.line != 0 {
						locStr += fmt.Sprintf(":%d", n.line)
					}
				}
			}
			switch {
			case locStr == "":
				// No location info, just print the instruction.
				fmt.Fprintf(w, "%10s %10s %10x: %s\n",
					valueOrDot(n.flatValue(), rpt),
					valueOrDot(n.cumValue(), rpt),
					n.address, n.instruction,
				)
			case len(n.instruction) < 40:
				// Short instruction, print loc on the same line.
				fmt.Fprintf(w, "%10s %10s %10x: %-40s;%s\n",
					valueOrDot(n.flatValue(), rpt),
					valueOrDot(n.cumValue(), rpt),
					n.address, n.instruction,
					locStr,
				)
			default:
				// Long instruction, print loc on a separate line.
				fmt.Fprintf(w, "%74s;%s\n", "", locStr)
				fmt.Fprintf(w, "%10s %10s %10x: %s\n",
					valueOrDot(n.flatValue(), rpt),
					valueOrDot(n.cumValue(), rpt),
					n.address, n.instruction,
				)
			}
		}
	}
	return nil
}
+
// symbolsFromBinaries examines the binaries listed on the profile that have
// associated samples, and returns the identified symbols matching rx.
// If address is non-nil, mappings whose range covers that address are
// examined even when they do not match rx.
func symbolsFromBinaries(prof *profile.Profile, g *graph.Graph, rx *regexp.Regexp, address *uint64, obj plugin.ObjTool) []*objSymbol {
	// fileHasSamplesAndMatched is for optimization to speed up pprof: when later
	// walking through the profile mappings, it will only examine the ones that have
	// samples and are matched to the regexp.
	fileHasSamplesAndMatched := make(map[string]bool)
	for _, n := range g.Nodes {
		if name := n.Info.PrintableName(); rx.MatchString(name) && n.Info.Objfile != "" {
			fileHasSamplesAndMatched[n.Info.Objfile] = true
		}
	}

	// Walk all mappings looking for matching functions with samples.
	var objSyms []*objSymbol
	for _, m := range prof.Mapping {
		// Skip the mapping if its file does not have samples or is not matched to
		// the regexp (unless the regexp is an address and the mapping's range covers
		// the address)
		if !fileHasSamplesAndMatched[m.File] {
			if address == nil || !(m.Start <= *address && *address <= m.Limit) {
				continue
			}
		}

		f, err := obj.Open(m.File, m.Start, m.Limit, m.Offset, m.KernelRelocationSymbol)
		if err != nil {
			// NOTE(review): this prints the open error to standard output,
			// which the UI contract reserves for report data; consider
			// routing it through the UI's error channel instead — confirm.
			fmt.Printf("%v\n", err)
			continue
		}

		// Find symbols in this binary matching the user regexp.
		var addr uint64
		if address != nil {
			addr = *address
		}
		msyms, err := f.Symbols(rx, addr)
		f.Close()
		if err != nil {
			// Symbol extraction failures are silently skipped; the
			// mapping simply contributes no symbols.
			continue
		}
		for _, ms := range msyms {
			objSyms = append(objSyms,
				&objSymbol{
					sym:  ms,
					file: f,
				},
			)
		}
	}

	return objSyms
}
+
// objSymbol represents a symbol identified from a binary. It pairs the
// symbol information from the object tool with the object file it was
// found in, which is needed later to translate sample addresses into
// object addresses.
type objSymbol struct {
	sym  *plugin.Sym
	file plugin.ObjFile
}

// orderSyms is a wrapper type to sort []*objSymbol by a supplied comparator.
// It implements sort.Interface.
type orderSyms struct {
	v    []*objSymbol
	less func(a, b *objSymbol) bool
}

func (o orderSyms) Len() int           { return len(o.v) }
func (o orderSyms) Less(i, j int) bool { return o.less(o.v[i], o.v[j]) }
func (o orderSyms) Swap(i, j int)      { o.v[i], o.v[j] = o.v[j], o.v[i] }
+
+// nodesPerSymbol classifies nodes into a group of symbols.
+func nodesPerSymbol(ns graph.Nodes, symbols []*objSymbol) map[*objSymbol]graph.Nodes {
+ symNodes := make(map[*objSymbol]graph.Nodes)
+ for _, s := range symbols {
+ // Gather samples for this symbol.
+ for _, n := range ns {
+ if address, err := s.file.ObjAddr(n.Info.Address); err == nil && address >= s.sym.Start && address < s.sym.End {
+ symNodes[s] = append(symNodes[s], n)
+ }
+ }
+ }
+ return symNodes
+}
+
// assemblyInstruction is a disassembled instruction together with the
// sample weights and source location attributed to it.
type (
	assemblyInstruction struct {
		address         uint64
		instruction     string
		function        string
		file            string
		line            int
		flat, cum       int64
		flatDiv, cumDiv int64
		startsBlock     bool
		inlineCalls     []callID
	}

	// callID identifies a call site by file and line.
	callID struct {
		file string
		line int
	}
)

// flatValue returns the flat weight, divided by its accumulated divisor
// when one is present.
func (ins *assemblyInstruction) flatValue() int64 {
	if d := ins.flatDiv; d != 0 {
		return ins.flat / d
	}
	return ins.flat
}

// cumValue returns the cumulative weight, divided by its accumulated
// divisor when one is present.
func (ins *assemblyInstruction) cumValue() int64 {
	if d := ins.cumDiv; d != 0 {
		return ins.cum / d
	}
	return ins.cum
}
+
// annotateAssembly annotates a set of assembly instructions with a
// set of samples. It returns a set of nodes to display. file is the
// object file used to translate sample addresses into object addresses.
func annotateAssembly(insts []plugin.Inst, samples graph.Nodes, file plugin.ObjFile) []assemblyInstruction {
	// Add end marker to simplify printing loop.
	insts = append(insts, plugin.Inst{
		Addr: ^uint64(0),
	})

	// Ensure samples are sorted by address.
	samples.Sort(graph.AddressOrder)

	// s indexes samples; it only moves forward because both insts and
	// samples are in address order.
	s := 0
	asm := make([]assemblyInstruction, 0, len(insts))
	for ix, in := range insts[:len(insts)-1] {
		n := assemblyInstruction{
			address:     in.Addr,
			instruction: in.Text,
			function:    in.Function,
			line:        in.Line,
		}
		if in.File != "" {
			n.file = filepath.Base(in.File)
		}

		// Sum all the samples until the next instruction (to account
		// for samples attributed to the middle of an instruction).
		for next := insts[ix+1].Addr; s < len(samples); s++ {
			if addr, err := file.ObjAddr(samples[s].Info.Address); err != nil || addr >= next {
				break
			}
			sample := samples[s]
			n.flatDiv += sample.FlatDiv
			n.flat += sample.Flat
			n.cumDiv += sample.CumDiv
			n.cum += sample.Cum
			// Fill in location details missing from the disassembly
			// using the sample's info, never overwriting existing ones.
			if f := sample.Info.File; f != "" && n.file == "" {
				n.file = filepath.Base(f)
			}
			if ln := sample.Info.Lineno; ln != 0 && n.line == 0 {
				n.line = ln
			}
			if f := sample.Info.Name; f != "" && n.function == "" {
				n.function = f
			}
		}
		asm = append(asm, n)
	}

	return asm
}
+
+// valueOrDot formats a value according to a report, intercepting zero
+// values.
+func valueOrDot(value int64, rpt *Report) string {
+ if value == 0 {
+ return "."
+ }
+ return rpt.formatValue(value)
+}
+
// printTags collects all tags referenced in the profile and prints
// them in a sorted table.
func printTags(w io.Writer, rpt *Report) error {
	p := rpt.prof

	o := rpt.options
	formatTag := func(v int64, key string) string {
		return measurement.ScaledLabel(v, key, o.OutputUnit)
	}

	// Accumulate tags as key -> value -> total sample value.
	tagMap := make(map[string]map[string]int64)
	for _, s := range p.Sample {
		// Text labels are accumulated under their raw string value.
		for key, vals := range s.Label {
			for _, val := range vals {
				valueMap, ok := tagMap[key]
				if !ok {
					valueMap = make(map[string]int64)
					tagMap[key] = valueMap
				}
				valueMap[val] += o.SampleValue(s.Value)
			}
		}
		// Numeric labels are formatted with their unit first, so values
		// that format identically are merged.
		for key, vals := range s.NumLabel {
			unit := o.NumLabelUnits[key]
			for _, nval := range vals {
				val := formatTag(nval, unit)
				valueMap, ok := tagMap[key]
				if !ok {
					valueMap = make(map[string]int64)
					tagMap[key] = valueMap
				}
				valueMap[val] += o.SampleValue(s.Value)
			}
		}
	}

	tagKeys := make([]*graph.Tag, 0, len(tagMap))
	for key := range tagMap {
		tagKeys = append(tagKeys, &graph.Tag{Name: key})
	}
	tabw := tabwriter.NewWriter(w, 0, 0, 1, ' ', tabwriter.AlignRight)
	for _, tagKey := range graph.SortTags(tagKeys, true) {
		var total int64
		key := tagKey.Name
		tags := make([]*graph.Tag, 0, len(tagMap[key]))
		for t, c := range tagMap[key] {
			total += c
			tags = append(tags, &graph.Tag{Name: t, Flat: c})
		}

		f, u := measurement.Scale(total, o.SampleUnit, o.OutputUnit)
		fmt.Fprintf(tabw, "%s:\t Total %.1f%s\n", key, f, u)
		for _, t := range graph.SortTags(tags, true) {
			f, u := measurement.Scale(t.FlatValue(), o.SampleUnit, o.OutputUnit)
			if total > 0 {
				fmt.Fprintf(tabw, " \t%.1f%s (%s):\t %s\n", f, u, measurement.Percentage(t.FlatValue(), total), t.Name)
			} else {
				fmt.Fprintf(tabw, " \t%.1f%s:\t %s\n", f, u, t.Name)
			}
		}
		fmt.Fprintln(tabw)
	}
	return tabw.Flush()
}
+
+// printComments prints all freeform comments in the profile.
+func printComments(w io.Writer, rpt *Report) error {
+ p := rpt.prof
+
+ for _, c := range p.Comments {
+ fmt.Fprintln(w, c)
+ }
+ return nil
+}
+
// TextItem holds a single text report entry (one row of a text/top report).
type TextItem struct {
	Name                  string
	InlineLabel           string // Not empty if inlined
	Flat, Cum             int64  // Raw values
	FlatFormat, CumFormat string // Formatted (unit-scaled) values
}
+
+// TextItems returns a list of text items from the report and a list
+// of labels that describe the report.
+func TextItems(rpt *Report) ([]TextItem, []string) {
+ g, origCount, droppedNodes, _ := rpt.newTrimmedGraph()
+ rpt.selectOutputUnit(g)
+ labels := reportLabels(rpt, g, origCount, droppedNodes, 0, false)
+
+ var items []TextItem
+ var flatSum int64
+ for _, n := range g.Nodes {
+ name, flat, cum := n.Info.PrintableName(), n.FlatValue(), n.CumValue()
+
+ var inline, noinline bool
+ for _, e := range n.In {
+ if e.Inline {
+ inline = true
+ } else {
+ noinline = true
+ }
+ }
+
+ var inl string
+ if inline {
+ if noinline {
+ inl = "(partial-inline)"
+ } else {
+ inl = "(inline)"
+ }
+ }
+
+ flatSum += flat
+ items = append(items, TextItem{
+ Name: name,
+ InlineLabel: inl,
+ Flat: flat,
+ Cum: cum,
+ FlatFormat: rpt.formatValue(flat),
+ CumFormat: rpt.formatValue(cum),
+ })
+ }
+ return items, labels
+}
+
+// printText prints a flat text report for a profile.
+func printText(w io.Writer, rpt *Report) error {
+ items, labels := TextItems(rpt)
+ fmt.Fprintln(w, strings.Join(labels, "\n"))
+ fmt.Fprintf(w, "%10s %5s%% %5s%% %10s %5s%%\n",
+ "flat", "flat", "sum", "cum", "cum")
+ var flatSum int64
+ for _, item := range items {
+ inl := item.InlineLabel
+ if inl != "" {
+ inl = " " + inl
+ }
+ flatSum += item.Flat
+ fmt.Fprintf(w, "%10s %s %s %10s %s %s%s\n",
+ item.FlatFormat, measurement.Percentage(item.Flat, rpt.total),
+ measurement.Percentage(flatSum, rpt.total),
+ item.CumFormat, measurement.Percentage(item.Cum, rpt.total),
+ item.Name, inl)
+ }
+ return nil
+}
+
// printTraces prints all traces from a profile: for every sample, its
// labels followed by its call stack, with the sample value shown on the
// leaf frame.
func printTraces(w io.Writer, rpt *Report) error {
	fmt.Fprintln(w, strings.Join(ProfileLabels(rpt), "\n"))

	prof := rpt.prof
	o := rpt.options

	const separator = "-----------+-------------------------------------------------------"

	_, locations := graph.CreateNodes(prof, &graph.Options{})
	for _, sample := range prof.Sample {
		type stk struct {
			*graph.NodeInfo
			inline bool
		}
		var stack []stk
		for _, loc := range sample.Location {
			nodes := locations[loc.ID]
			for i, n := range nodes {
				// The inline flag may be inaccurate if 'show' or 'hide' filter is
				// used. See https://github.com/google/pprof/issues/511.
				inline := i != len(nodes)-1
				stack = append(stack, stk{&n.Info, inline})
			}
		}

		if len(stack) == 0 {
			continue
		}

		fmt.Fprintln(w, separator)
		// Print any text labels for the sample.
		var labels []string
		for s, vs := range sample.Label {
			labels = append(labels, fmt.Sprintf("%10s: %s\n", s, strings.Join(vs, " ")))
		}
		sort.Strings(labels)
		fmt.Fprint(w, strings.Join(labels, ""))

		// Print any numeric labels for the sample
		var numLabels []string
		for key, vals := range sample.NumLabel {
			unit := o.NumLabelUnits[key]
			numValues := make([]string, len(vals))
			for i, vv := range vals {
				numValues[i] = measurement.Label(vv, unit)
			}
			numLabels = append(numLabels, fmt.Sprintf("%10s: %s\n", key, strings.Join(numValues, " ")))
		}
		sort.Strings(numLabels)
		fmt.Fprint(w, strings.Join(numLabels, ""))

		var d, v int64
		v = o.SampleValue(sample.Value)
		if o.SampleMeanDivisor != nil {
			d = o.SampleMeanDivisor(sample.Value)
		}
		// Print call stack.
		if d != 0 {
			// Mean-style sample types: report value divided by divisor.
			v = v / d
		}
		for i, s := range stack {
			var vs, inline string
			if i == 0 {
				// The sample value is shown only on the leaf frame.
				vs = rpt.formatValue(v)
			}
			if s.inline {
				inline = " (inline)"
			}
			fmt.Fprintf(w, "%10s %s%s\n", vs, s.PrintableName(), inline)
		}
	}
	fmt.Fprintln(w, separator)
	return nil
}
+
// printCallgrind prints a graph for a profile on callgrind format.
func printCallgrind(w io.Writer, rpt *Report) error {
	o := rpt.options
	// NOTE(review): these assignments disable trimming by mutating
	// rpt.options for the remainder of report generation; confirm no
	// caller relies on the previous values being restored.
	rpt.options.NodeFraction = 0
	rpt.options.EdgeFraction = 0
	rpt.options.NodeCount = 0

	g, _, _, _ := rpt.newTrimmedGraph()
	rpt.selectOutputUnit(g)

	nodeNames := getDisambiguatedNames(g)

	fmt.Fprintln(w, "positions: instr line")
	fmt.Fprintln(w, "events:", o.SampleType+"("+o.OutputUnit+")")

	// Name-compression tables, one per callgrind namespace.
	objfiles := make(map[string]int)
	files := make(map[string]int)
	names := make(map[string]int)

	// prevInfo points to the previous NodeInfo.
	// It is used to group cost lines together as much as possible.
	var prevInfo *graph.NodeInfo
	for _, n := range g.Nodes {
		if prevInfo == nil || n.Info.Objfile != prevInfo.Objfile || n.Info.File != prevInfo.File || n.Info.Name != prevInfo.Name {
			fmt.Fprintln(w)
			fmt.Fprintln(w, "ob="+callgrindName(objfiles, n.Info.Objfile))
			fmt.Fprintln(w, "fl="+callgrindName(files, n.Info.File))
			fmt.Fprintln(w, "fn="+callgrindName(names, n.Info.Name))
		}

		addr := callgrindAddress(prevInfo, n.Info.Address)
		sv, _ := measurement.Scale(n.FlatValue(), o.SampleUnit, o.OutputUnit)
		fmt.Fprintf(w, "%s %d %d\n", addr, n.Info.Lineno, int64(sv))

		// Print outgoing edges.
		for _, out := range n.Out.Sort() {
			c, _ := measurement.Scale(out.Weight, o.SampleUnit, o.OutputUnit)
			callee := out.Dest
			fmt.Fprintln(w, "cfl="+callgrindName(files, callee.Info.File))
			fmt.Fprintln(w, "cfn="+callgrindName(names, nodeNames[callee]))
			// pprof doesn't have a flat weight for a call, leave as 0.
			fmt.Fprintf(w, "calls=0 %s %d\n", callgrindAddress(prevInfo, callee.Info.Address), callee.Info.Lineno)
			// TODO: This address may be in the middle of a call
			// instruction. It would be best to find the beginning
			// of the instruction, but the tools seem to handle
			// this OK.
			fmt.Fprintf(w, "* * %d\n", int64(c))
		}

		prevInfo = &n.Info
	}

	return nil
}
+
// getDisambiguatedNames returns a map from each node in the graph to
// the name to use in the callgrind output. Callgrind merges all
// functions with the same [file name, function name]. Add a [%d/n]
// suffix to disambiguate nodes with different values of
// node.Function, which we want to keep separate. In particular, this
// affects graphs created with --call_tree, where nodes from different
// contexts are associated to different Functions.
func getDisambiguatedNames(g *graph.Graph) map[*graph.Node]string {
	nodeName := make(map[*graph.Node]string, len(g.Nodes))

	type names struct {
		file, function string
	}

	// nameFunctionIndex maps the callgrind names (filename, function)
	// to the node.Function values found for that name, and each
	// node.Function value to a sequential index to be used on the
	// disambiguated name.
	nameFunctionIndex := make(map[names]map[*graph.Node]int)
	// First pass: assign each distinct node.Function a stable index
	// within its (file, function-name) group.
	for _, n := range g.Nodes {
		nm := names{n.Info.File, n.Info.Name}
		p, ok := nameFunctionIndex[nm]
		if !ok {
			p = make(map[*graph.Node]int)
			nameFunctionIndex[nm] = p
		}
		if _, ok := p[n.Function]; !ok {
			p[n.Function] = len(p)
		}
	}

	// Second pass: emit names, suffixed only when a group is ambiguous.
	for _, n := range g.Nodes {
		nm := names{n.Info.File, n.Info.Name}
		nodeName[n] = n.Info.Name
		if p := nameFunctionIndex[nm]; len(p) > 1 {
			// If there is more than one function, add suffix to disambiguate.
			nodeName[n] += fmt.Sprintf(" [%d/%d]", p[n.Function]+1, len(p))
		}
	}
	return nodeName
}
+
// callgrindName implements the callgrind naming compression scheme.
// For names not previously seen returns "(N) name", where N is a
// unique index. For names previously seen returns "(N)" where N is
// the index returned the first time.
func callgrindName(names map[string]int, name string) string {
	if name == "" {
		return ""
	}
	id, seen := names[name]
	if !seen {
		// First occurrence: allocate the next index and emit the full form.
		id = len(names) + 1
		names[name] = id
		return fmt.Sprintf("(%d) %s", id, name)
	}
	return fmt.Sprintf("(%d)", id)
}
+
+// callgrindAddress implements the callgrind subposition compression scheme if
+// possible. If prevInfo != nil, it contains the previous address. The current
+// address can be given relative to the previous address, with an explicit +/-
+// to indicate it is relative, or * for the same address.
+func callgrindAddress(prevInfo *graph.NodeInfo, curr uint64) string {
+ abs := fmt.Sprintf("%#x", curr)
+ if prevInfo == nil {
+ return abs
+ }
+
+ prev := prevInfo.Address
+ if prev == curr {
+ return "*"
+ }
+
+ diff := int64(curr - prev)
+ relative := fmt.Sprintf("%+d", diff)
+
+ // Only bother to use the relative address if it is actually shorter.
+ if len(relative) < len(abs) {
+ return relative
+ }
+
+ return abs
+}
+
// printTree prints a tree-based report in text form: for each node, its
// incoming edges, the node itself, and its outgoing edges, delimited by
// separator lines.
func printTree(w io.Writer, rpt *Report) error {
	const separator = "----------------------------------------------------------+-------------"
	const legend = " flat flat% sum% cum cum% calls calls% + context "

	g, origCount, droppedNodes, _ := rpt.newTrimmedGraph()
	rpt.selectOutputUnit(g)

	fmt.Fprintln(w, strings.Join(reportLabels(rpt, g, origCount, droppedNodes, 0, false), "\n"))

	fmt.Fprintln(w, separator)
	fmt.Fprintln(w, legend)
	var flatSum int64

	rx := rpt.options.Symbol
	matched := 0
	for _, n := range g.Nodes {
		name, flat, cum := n.Info.PrintableName(), n.FlatValue(), n.CumValue()

		// Skip any entries that do not match the regexp (for the "peek" command).
		if rx != nil && !rx.MatchString(name) {
			continue
		}
		matched++

		fmt.Fprintln(w, separator)
		// Print incoming edges.
		inEdges := n.In.Sort()
		for _, in := range inEdges {
			var inline string
			if in.Inline {
				inline = " (inline)"
			}
			fmt.Fprintf(w, "%50s %s | %s%s\n", rpt.formatValue(in.Weight),
				measurement.Percentage(in.Weight, cum), in.Src.Info.PrintableName(), inline)
		}

		// Print current node.
		flatSum += flat
		fmt.Fprintf(w, "%10s %s %s %10s %s | %s\n",
			rpt.formatValue(flat),
			measurement.Percentage(flat, rpt.total),
			measurement.Percentage(flatSum, rpt.total),
			rpt.formatValue(cum),
			measurement.Percentage(cum, rpt.total),
			name)

		// Print outgoing edges.
		outEdges := n.Out.Sort()
		for _, out := range outEdges {
			var inline string
			if out.Inline {
				inline = " (inline)"
			}
			fmt.Fprintf(w, "%50s %s | %s%s\n", rpt.formatValue(out.Weight),
				measurement.Percentage(out.Weight, cum), out.Dest.Info.PrintableName(), inline)
		}
	}
	if len(g.Nodes) > 0 {
		fmt.Fprintln(w, separator)
	}
	if rx != nil && matched == 0 {
		return fmt.Errorf("no matches found for regexp: %s", rx)
	}
	return nil
}
+
+// GetDOT returns a graph suitable for dot processing along with some
+// configuration information.
+func GetDOT(rpt *Report) (*graph.Graph, *graph.DotConfig) {
+ g, origCount, droppedNodes, droppedEdges := rpt.newTrimmedGraph()
+ rpt.selectOutputUnit(g)
+ labels := reportLabels(rpt, g, origCount, droppedNodes, droppedEdges, true)
+
+ c := &graph.DotConfig{
+ Title: rpt.options.Title,
+ Labels: labels,
+ FormatValue: rpt.formatValue,
+ Total: rpt.total,
+ }
+ return g, c
+}
+
+// printDOT prints an annotated callgraph in DOT format.
+func printDOT(w io.Writer, rpt *Report) error {
+ g, c := GetDOT(rpt)
+ graph.ComposeDot(w, g, &graph.DotAttributes{}, c)
+ return nil
+}
+
+// ProfileLabels returns printable labels for a profile.
+func ProfileLabels(rpt *Report) []string {
+ label := []string{}
+ prof := rpt.prof
+ o := rpt.options
+ if len(prof.Mapping) > 0 {
+ if prof.Mapping[0].File != "" {
+ label = append(label, "File: "+filepath.Base(prof.Mapping[0].File))
+ }
+ if prof.Mapping[0].BuildID != "" {
+ label = append(label, "Build ID: "+prof.Mapping[0].BuildID)
+ }
+ }
+ // Only include comments that do not start with '#'.
+ for _, c := range prof.Comments {
+ if !strings.HasPrefix(c, "#") {
+ label = append(label, c)
+ }
+ }
+ if o.SampleType != "" {
+ label = append(label, "Type: "+o.SampleType)
+ }
+ if prof.TimeNanos != 0 {
+ const layout = "Jan 2, 2006 at 3:04pm (MST)"
+ label = append(label, "Time: "+time.Unix(0, prof.TimeNanos).Format(layout))
+ }
+ if prof.DurationNanos != 0 {
+ duration := measurement.Label(prof.DurationNanos, "nanoseconds")
+ totalNanos, totalUnit := measurement.Scale(rpt.total, o.SampleUnit, "nanoseconds")
+ var ratio string
+ if totalUnit == "ns" && totalNanos != 0 {
+ ratio = "(" + measurement.Percentage(int64(totalNanos), prof.DurationNanos) + ")"
+ }
+ label = append(label, fmt.Sprintf("Duration: %s, Total samples = %s %s", duration, rpt.formatValue(rpt.total), ratio))
+ }
+ return label
+}
+
// reportLabels returns printable labels for a report. Includes
// profileLabels.
// The returned lines appear, in order: profile labels (or the caller-supplied
// override), active filters, the "Showing nodes ..." summary, drop
// statistics, and (when fullHeaders) a pointer to the graph-reading guide.
func reportLabels(rpt *Report, g *graph.Graph, origCount, droppedNodes, droppedEdges int, fullHeaders bool) []string {
	nodeFraction := rpt.options.NodeFraction
	edgeFraction := rpt.options.EdgeFraction
	nodeCount := len(g.Nodes)

	var label []string
	// Explicit ProfileLabels override the computed ones entirely.
	if len(rpt.options.ProfileLabels) > 0 {
		label = append(label, rpt.options.ProfileLabels...)
	} else if fullHeaders || !rpt.options.CompactLabels {
		label = ProfileLabels(rpt)
	}

	// Sum of flat values over the nodes that survived trimming.
	var flatSum int64
	for _, n := range g.Nodes {
		flatSum = flatSum + n.FlatValue()
	}

	if len(rpt.options.ActiveFilters) > 0 {
		activeFilters := legendActiveFilters(rpt.options.ActiveFilters)
		label = append(label, activeFilters...)
	}

	label = append(label, fmt.Sprintf("Showing nodes accounting for %s, %s of %s total", rpt.formatValue(flatSum), strings.TrimSpace(measurement.Percentage(flatSum, rpt.total)), rpt.formatValue(rpt.total)))

	// Drop statistics are only meaningful for a non-zero total.
	if rpt.total != 0 {
		if droppedNodes > 0 {
			label = append(label, genLabel(droppedNodes, "node", "cum",
				rpt.formatValue(abs64(int64(float64(rpt.total)*nodeFraction)))))
		}
		if droppedEdges > 0 {
			label = append(label, genLabel(droppedEdges, "edge", "freq",
				rpt.formatValue(abs64(int64(float64(rpt.total)*edgeFraction)))))
		}
		if nodeCount > 0 && nodeCount < origCount {
			label = append(label, fmt.Sprintf("Showing top %d nodes out of %d",
				nodeCount, origCount))
		}
	}

	// Help new users understand the graph.
	// A new line is intentionally added here to better show this message.
	if fullHeaders {
		label = append(label, "\nSee https://git.io/JfYMW for how to read the graph")
	}

	return label
}
+
// legendActiveFilters builds the legend lines describing the currently
// active filters: a header followed by one indented line per filter,
// truncating any filter longer than 80 bytes.
func legendActiveFilters(activeFilters []string) []string {
	legend := make([]string, 0, len(activeFilters)+1)
	legend = append(legend, "Active filters:")
	for _, f := range activeFilters {
		if len(f) > 80 {
			f = f[:80] + "…"
		}
		legend = append(legend, "   "+f)
	}
	return legend
}
+
// genLabel formats a "Dropped ..." legend entry for d dropped items of
// kind n (pluralized when d > 1), filtered on metric l with threshold f.
func genLabel(d int, n, l, f string) string {
	noun := n
	if d > 1 {
		noun += "s"
	}
	return fmt.Sprintf("Dropped %d %s (%s <= %s)", d, noun, l, f)
}
+
+// New builds a new report indexing the sample values interpreting the
+// samples with the provided function.
+func New(prof *profile.Profile, o *Options) *Report {
+ format := func(v int64) string {
+ if r := o.Ratio; r > 0 && r != 1 {
+ fv := float64(v) * r
+ v = int64(fv)
+ }
+ return measurement.ScaledLabel(v, o.SampleUnit, o.OutputUnit)
+ }
+ return &Report{prof, computeTotal(prof, o.SampleValue, o.SampleMeanDivisor),
+ o, format}
+}
+
// NewDefault builds a new report indexing the last sample value
// available.
// NOTE(review): indexes prof.SampleType[len-1] directly; an empty
// SampleType slice would panic here — presumably callers guarantee a
// non-empty profile, confirm upstream.
func NewDefault(prof *profile.Profile, options Options) *Report {
	index := len(prof.SampleType) - 1
	o := &options
	// Default the title to the profiled binary's base name when available.
	if o.Title == "" && len(prof.Mapping) > 0 && prof.Mapping[0].File != "" {
		o.Title = filepath.Base(prof.Mapping[0].File)
	}
	o.SampleType = prof.SampleType[index].Type
	o.SampleUnit = strings.ToLower(prof.SampleType[index].Unit)
	o.SampleValue = func(v []int64) int64 {
		return v[index]
	}
	return New(prof, o)
}
+
+// computeTotal computes the sum of the absolute value of all sample values.
+// If any samples have label indicating they belong to the diff base, then the
+// total will only include samples with that label.
+func computeTotal(prof *profile.Profile, value, meanDiv func(v []int64) int64) int64 {
+ var div, total, diffDiv, diffTotal int64
+ for _, sample := range prof.Sample {
+ var d, v int64
+ v = value(sample.Value)
+ if meanDiv != nil {
+ d = meanDiv(sample.Value)
+ }
+ if v < 0 {
+ v = -v
+ }
+ total += v
+ div += d
+ if sample.DiffBaseSample() {
+ diffTotal += v
+ diffDiv += d
+ }
+ }
+ if diffTotal > 0 {
+ total = diffTotal
+ div = diffDiv
+ }
+ if div != 0 {
+ return total / div
+ }
+ return total
+}
+
// Report contains the data and associated routines to extract a
// report from a profile.
type Report struct {
	prof        *profile.Profile   // profile the report is generated from
	total       int64              // total sample value, see computeTotal
	options     *Options           // formatting/filtering options
	formatValue func(int64) string // renders a value in the output unit
}

// Total returns the total number of samples in a report.
func (rpt *Report) Total() int64 { return rpt.total }
+
// abs64 returns the absolute value of i.
// (Like all two's-complement abs, abs64(math.MinInt64) overflows and
// returns math.MinInt64 unchanged.)
func abs64(i int64) int64 {
	if i >= 0 {
		return i
	}
	return -i
}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/report/shortnames.go b/src/cmd/vendor/github.com/google/pprof/internal/report/shortnames.go
new file mode 100644
index 0000000..3d9f3f4
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/report/shortnames.go
@@ -0,0 +1,39 @@
+// Copyright 2022 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package report
+
+import (
+ "regexp"
+
+ "github.com/google/pprof/internal/graph"
+)
+
// sepRE matches the name separators ("::" or ".") at which a function
// name may be shortened to a suffix.
var sepRE = regexp.MustCompile(`::|\.`)
+
+// shortNameList returns a non-empty sequence of shortened names
+// (in decreasing preference) that can be used to represent name.
+func shortNameList(name string) []string {
+ name = graph.ShortenFunctionName(name)
+ seps := sepRE.FindAllStringIndex(name, -1)
+ result := make([]string, 0, len(seps)+1)
+ result = append(result, name)
+ for _, sep := range seps {
+ // Suffix starting just after sep
+ if sep[1] < len(name) {
+ result = append(result, name[sep[1]:])
+ }
+ }
+ return result
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/report/source.go b/src/cmd/vendor/github.com/google/pprof/internal/report/source.go
new file mode 100644
index 0000000..d8b4395
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/report/source.go
@@ -0,0 +1,1114 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package report
+
+// This file contains routines related to the generation of annotated
+// source listings.
+
+import (
+ "bufio"
+ "fmt"
+ "html/template"
+ "io"
+ "os"
+ "path/filepath"
+ "regexp"
+ "sort"
+ "strconv"
+ "strings"
+
+ "github.com/google/pprof/internal/graph"
+ "github.com/google/pprof/internal/measurement"
+ "github.com/google/pprof/internal/plugin"
+ "github.com/google/pprof/profile"
+)
+
// printSource prints an annotated source listing, include all
// functions with samples that match the regexp rpt.options.symbol.
// The sources are sorted by function name and then by filename to
// eliminate potential nondeterminism.
func printSource(w io.Writer, rpt *Report) error {
	o := rpt.options
	g := rpt.newGraph(nil)

	// Identify all the functions that match the regexp provided.
	// Group nodes for each matching function.
	var functions graph.Nodes
	functionNodes := make(map[string]graph.Nodes)
	for _, n := range g.Nodes {
		if !o.Symbol.MatchString(n.Info.Name) {
			continue
		}
		// "functions" keeps one representative node per name, used only
		// for the deterministic name-ordered iteration below.
		if functionNodes[n.Info.Name] == nil {
			functions = append(functions, n)
		}
		functionNodes[n.Info.Name] = append(functionNodes[n.Info.Name], n)
	}
	functions.Sort(graph.NameOrder)

	if len(functionNodes) == 0 {
		return fmt.Errorf("no matches found for regexp: %s", o.Symbol)
	}

	// Fall back to the working directory when no source path is configured.
	sourcePath := o.SourcePath
	if sourcePath == "" {
		wd, err := os.Getwd()
		if err != nil {
			return fmt.Errorf("could not stat current dir: %v", err)
		}
		sourcePath = wd
	}
	reader := newSourceReader(sourcePath, o.TrimPath)

	fmt.Fprintf(w, "Total: %s\n", rpt.formatValue(rpt.total))
	for _, fn := range functions {
		name := fn.Info.Name

		// Identify all the source files associated to this function.
		// Group nodes for each source file.
		var sourceFiles graph.Nodes
		fileNodes := make(map[string]graph.Nodes)
		for _, n := range functionNodes[name] {
			if n.Info.File == "" {
				continue
			}
			if fileNodes[n.Info.File] == nil {
				sourceFiles = append(sourceFiles, n)
			}
			fileNodes[n.Info.File] = append(fileNodes[n.Info.File], n)
		}

		if len(sourceFiles) == 0 {
			fmt.Fprintf(w, "No source information for %s\n", name)
			continue
		}

		sourceFiles.Sort(graph.FileOrder)

		// Print each file associated with this function.
		for _, fl := range sourceFiles {
			filename := fl.Info.File
			fns := fileNodes[filename]
			flatSum, cumSum := fns.Sum()

			// The header is printed even when reading the file fails, so
			// the error below still appears under the right routine.
			fnodes, _, err := getSourceFromFile(filename, reader, fns, 0, 0)
			fmt.Fprintf(w, "ROUTINE ======================== %s in %s\n", name, filename)
			fmt.Fprintf(w, "%10s %10s (flat, cum) %s of Total\n",
				rpt.formatValue(flatSum), rpt.formatValue(cumSum),
				measurement.Percentage(cumSum, rpt.total))

			if err != nil {
				fmt.Fprintf(w, " Error: %v\n", err)
				continue
			}

			for _, fn := range fnodes {
				fmt.Fprintf(w, "%10s %10s %6d:%s\n", valueOrDot(fn.Flat, rpt), valueOrDot(fn.Cum, rpt), fn.Info.Lineno, fn.Info.Name)
			}
		}
	}
	return nil
}
+
+// printWebSource prints an annotated source listing, include all
+// functions with samples that match the regexp rpt.options.symbol.
+func printWebSource(w io.Writer, rpt *Report, obj plugin.ObjTool) error {
+ printHeader(w, rpt)
+ if err := PrintWebList(w, rpt, obj, -1); err != nil {
+ return err
+ }
+ printPageClosing(w)
+ return nil
+}
+
// sourcePrinter holds state needed for generating source+asm HTML listing.
type sourcePrinter struct {
	reader     *sourceReader
	synth      *synthCode
	objectTool plugin.ObjTool
	objects    map[string]plugin.ObjFile // Opened object files; nil value caches an open failure
	sym        *regexp.Regexp            // May be nil
	files      map[string]*sourceFile    // Set of files to print.
	insts      map[uint64]instructionInfo // Instructions of interest (keyed by address).

	// Set of function names that we are interested in (because they had
	// a sample and match sym).
	interest map[string]bool

	// Mapping from system function names to printable names.
	prettyNames map[string]string
}

// addrInfo holds information for an address we are interested in.
type addrInfo struct {
	loc *profile.Location // Always non-nil
	obj plugin.ObjFile    // May be nil
}

// instructionInfo holds collected information for an instruction.
type instructionInfo struct {
	objAddr   uint64 // Address in object file (with base subtracted out)
	length    int    // Instruction length in bytes
	disasm    string // Disassembly of instruction
	file      string // For top-level function in which instruction occurs
	line      int    // For top-level function in which instruction occurs
	flat, cum int64  // Samples to report (divisor already applied)
}

// sourceFile contains collected information for files we will print.
type sourceFile struct {
	fname    string
	cum      int64
	flat     int64
	lines    map[int][]sourceInst // Instructions to show per line
	funcName map[int]string       // Function name per line
}

// sourceInst holds information for an instruction to be displayed.
type sourceInst struct {
	addr  uint64
	stack []callID // Inlined call-stack
}

// sourceFunction contains information for a contiguous range of lines per function we
// will print.
type sourceFunction struct {
	name       string
	begin, end int // Line numbers (end is not included in the range)
	flat, cum  int64
}

// addressRange is a range of addresses plus the object file that contains it.
type addressRange struct {
	begin, end uint64
	obj        plugin.ObjFile
	mapping    *profile.Mapping
	score      int64 // Used to order ranges for processing
}
+
+// PrintWebList prints annotated source listing of rpt to w.
+// rpt.prof should contain inlined call info.
+func PrintWebList(w io.Writer, rpt *Report, obj plugin.ObjTool, maxFiles int) error {
+ sourcePath := rpt.options.SourcePath
+ if sourcePath == "" {
+ wd, err := os.Getwd()
+ if err != nil {
+ return fmt.Errorf("could not stat current dir: %v", err)
+ }
+ sourcePath = wd
+ }
+ sp := newSourcePrinter(rpt, obj, sourcePath)
+ if len(sp.interest) == 0 {
+ return fmt.Errorf("no matches found for regexp: %s", rpt.options.Symbol)
+ }
+ sp.print(w, maxFiles, rpt)
+ sp.close()
+ return nil
+}
+
// newSourcePrinter scans all samples in rpt.prof, records which addresses
// and function names are of interest (match rpt.options.Symbol, or all of
// them when the regexp is nil), and accumulates flat/cum counts per address.
func newSourcePrinter(rpt *Report, obj plugin.ObjTool, sourcePath string) *sourcePrinter {
	sp := &sourcePrinter{
		reader:      newSourceReader(sourcePath, rpt.options.TrimPath),
		synth:       newSynthCode(rpt.prof.Mapping),
		objectTool:  obj,
		objects:     map[string]plugin.ObjFile{},
		sym:         rpt.options.Symbol,
		files:       map[string]*sourceFile{},
		insts:       map[uint64]instructionInfo{},
		prettyNames: map[string]string{},
		interest:    map[string]bool{},
	}

	// If the regexp source can be parsed as an address, also match
	// functions that land on that address.
	var address *uint64
	if sp.sym != nil {
		if hex, err := strconv.ParseUint(sp.sym.String(), 0, 64); err == nil {
			address = &hex
		}
	}

	addrs := map[uint64]addrInfo{}
	flat := map[uint64]int64{}
	cum := map[uint64]int64{}

	// Record an interest in the function corresponding to lines[index].
	markInterest := func(addr uint64, loc *profile.Location, index int) {
		fn := loc.Line[index]
		if fn.Function == nil {
			return
		}
		sp.interest[fn.Function.Name] = true
		sp.interest[fn.Function.SystemName] = true
		if _, ok := addrs[addr]; !ok {
			addrs[addr] = addrInfo{loc, sp.objectFile(loc.Mapping)}
		}
	}

	// See if sp.sym matches line.
	matches := func(line profile.Line) bool {
		if line.Function == nil {
			return false
		}
		return sp.sym.MatchString(line.Function.Name) ||
			sp.sym.MatchString(line.Function.SystemName) ||
			sp.sym.MatchString(line.Function.Filename)
	}

	// Extract sample counts and compute set of interesting functions.
	for _, sample := range rpt.prof.Sample {
		value := rpt.options.SampleValue(sample.Value)
		if rpt.options.SampleMeanDivisor != nil {
			div := rpt.options.SampleMeanDivisor(sample.Value)
			if div != 0 {
				value /= div
			}
		}

		// Find call-sites matching sym. Iterate callers-first (deepest
		// frame is at index 0, so walk from the end).
		for i := len(sample.Location) - 1; i >= 0; i-- {
			loc := sample.Location[i]
			for _, line := range loc.Line {
				if line.Function == nil {
					continue
				}
				sp.prettyNames[line.Function.SystemName] = line.Function.Name
			}

			addr := loc.Address
			if addr == 0 {
				// Some profiles are missing valid addresses.
				addr = sp.synth.address(loc)
			}

			// Every frame contributes to cum; only the leaf (i == 0)
			// contributes to flat.
			cum[addr] += value
			if i == 0 {
				flat[addr] += value
			}

			if sp.sym == nil || (address != nil && addr == *address) {
				// Interested in top-level entry of stack.
				if len(loc.Line) > 0 {
					markInterest(addr, loc, len(loc.Line)-1)
				}
				continue
			}

			// Search in inlined stack for a match.
			matchFile := (loc.Mapping != nil && sp.sym.MatchString(loc.Mapping.File))
			for j, line := range loc.Line {
				if (j == 0 && matchFile) || matches(line) {
					markInterest(addr, loc, j)
				}
			}
		}
	}

	sp.expandAddresses(rpt, addrs, flat)
	sp.initSamples(flat, cum)
	return sp
}
+
+func (sp *sourcePrinter) close() {
+ for _, objFile := range sp.objects {
+ if objFile != nil {
+ objFile.Close()
+ }
+ }
+}
+
// expandAddresses disassembles the address ranges covering the sampled
// addresses and records per-instruction info (sp.insts) and the per-line
// inlined call stacks (via addStack) for every interesting function.
func (sp *sourcePrinter) expandAddresses(rpt *Report, addrs map[uint64]addrInfo, flat map[uint64]int64) {
	// We found interesting addresses (ones with non-zero samples) above.
	// Get covering address ranges and disassemble the ranges.
	ranges, unprocessed := sp.splitIntoRanges(rpt.prof, addrs, flat)
	sp.handleUnprocessed(addrs, unprocessed)

	// Trim ranges if there are too many. Keep the highest-scoring (most
	// flat samples) ranges.
	const maxRanges = 25
	sort.Slice(ranges, func(i, j int) bool {
		return ranges[i].score > ranges[j].score
	})
	if len(ranges) > maxRanges {
		ranges = ranges[:maxRanges]
	}

	for _, r := range ranges {
		objBegin, err := r.obj.ObjAddr(r.begin)
		if err != nil {
			fmt.Fprintf(os.Stderr, "Failed to compute objdump address for range start %x: %v\n", r.begin, err)
			continue
		}
		objEnd, err := r.obj.ObjAddr(r.end)
		if err != nil {
			fmt.Fprintf(os.Stderr, "Failed to compute objdump address for range end %x: %v\n", r.end, err)
			continue
		}
		// base converts object-file addresses back to profile addresses.
		base := r.begin - objBegin
		insts, err := sp.objectTool.Disasm(r.mapping.File, objBegin, objEnd, rpt.options.IntelSyntax)
		if err != nil {
			// TODO(sanjay): Report that the covered addresses are missing.
			continue
		}

		var lastFrames []plugin.Frame
		var lastAddr, maxAddr uint64
		for i, inst := range insts {
			addr := inst.Addr + base

			// Guard against duplicate output from Disasm.
			if addr <= maxAddr {
				continue
			}
			maxAddr = addr

			length := 1
			if i+1 < len(insts) && insts[i+1].Addr > inst.Addr {
				// Extend to next instruction.
				length = int(insts[i+1].Addr - inst.Addr)
			}

			// Get inlined-call-stack for address.
			frames, err := r.obj.SourceLine(addr)
			if err != nil {
				// Construct a frame from disassembler output.
				frames = []plugin.Frame{{Func: inst.Function, File: inst.File, Line: inst.Line}}
			}

			x := instructionInfo{objAddr: inst.Addr, length: length, disasm: inst.Text}
			if len(frames) > 0 {
				// We could consider using the outer-most caller's source
				// location so we give the some hint as to where the
				// inlining happened that led to this instruction. So for
				// example, suppose we have the following (inlined) call
				// chains for this instruction:
				// F1->G->H
				// F2->G->H
				// We could tag the instructions from the first call with
				// F1 and instructions from the second call with F2. But
				// that leads to a somewhat confusing display. So for now,
				// we stick with just the inner-most location (i.e., H).
				// In the future we will consider changing the display to
				// make caller info more visible.
				index := 0 // Inner-most frame
				x.file = frames[index].File
				x.line = frames[index].Line
			}
			sp.insts[addr] = x

			// We sometimes get instructions with a zero reported line number.
			// Make such instructions have the same line info as the preceding
			// instruction, if an earlier instruction is found close enough.
			const neighborhood = 32
			if len(frames) > 0 && frames[0].Line != 0 {
				lastFrames = frames
				lastAddr = addr
			} else if (addr-lastAddr <= neighborhood) && lastFrames != nil {
				frames = lastFrames
			}

			sp.addStack(addr, frames)
		}
	}
}
+
// addStack records the instruction at addr, with its inlined call stack
// frames, under every frame whose function is in sp.interest. The sub-stack
// below each such frame is stored (caller-first) so the listing can show
// the inlining context.
func (sp *sourcePrinter) addStack(addr uint64, frames []plugin.Frame) {
	// See if the stack contains a function we are interested in.
	for i, f := range frames {
		if !sp.interest[f.Func] {
			continue
		}

		// Record sub-stack under frame's file/line.
		fname := canonicalizeFileName(f.File)
		file := sp.files[fname]
		if file == nil {
			// Lazily create the per-file record on first use.
			file = &sourceFile{
				fname:    fname,
				lines:    map[int][]sourceInst{},
				funcName: map[int]string{},
			}
			sp.files[fname] = file
		}
		callees := frames[:i]
		stack := make([]callID, 0, len(callees))
		for j := len(callees) - 1; j >= 0; j-- { // Reverse so caller is first
			stack = append(stack, callID{
				file: callees[j].File,
				line: callees[j].Line,
			})
		}
		file.lines[f.Line] = append(file.lines[f.Line], sourceInst{addr, stack})

		// Remember the first function name encountered per source line
		// and assume that that line belongs to that function.
		if _, ok := file.funcName[f.Line]; !ok {
			file.funcName[f.Line] = f.Func
		}
	}
}
+
// synthAsm is the special disassembler value used for instructions without an object file.
const synthAsm = ""

// handleUnprocessed handles addresses that were skipped by splitIntoRanges because they
// did not belong to a known object file. For each such address it records a
// synthetic one-byte instruction built from the profile's own line info.
func (sp *sourcePrinter) handleUnprocessed(addrs map[uint64]addrInfo, unprocessed []uint64) {
	// makeFrames synthesizes a []plugin.Frame list for the specified address.
	// The result will typically have length 1, but may be longer if address corresponds
	// to inlined calls.
	makeFrames := func(addr uint64) []plugin.Frame {
		loc := addrs[addr].loc
		stack := make([]plugin.Frame, 0, len(loc.Line))
		for _, line := range loc.Line {
			fn := line.Function
			if fn == nil {
				continue
			}
			stack = append(stack, plugin.Frame{
				Func: fn.Name,
				File: fn.Filename,
				Line: int(line.Line),
			})
		}
		return stack
	}

	for _, addr := range unprocessed {
		frames := makeFrames(addr)
		x := instructionInfo{
			objAddr: addr,
			length:  1,
			disasm:  synthAsm,
		}
		if len(frames) > 0 {
			x.file = frames[0].File
			x.line = frames[0].Line
		}
		sp.insts[addr] = x

		sp.addStack(addr, frames)
	}
}
+
+// splitIntoRanges converts the set of addresses we are interested in into a set of address
+// ranges to disassemble. It also returns the set of addresses found that did not have an
+// associated object file and were therefore not added to an address range.
+func (sp *sourcePrinter) splitIntoRanges(prof *profile.Profile, addrMap map[uint64]addrInfo, flat map[uint64]int64) ([]addressRange, []uint64) {
+ // Partition addresses into two sets: ones with a known object file, and ones without.
+ var addrs, unprocessed []uint64
+ for addr, info := range addrMap {
+ if info.obj != nil {
+ addrs = append(addrs, addr)
+ } else {
+ unprocessed = append(unprocessed, addr)
+ }
+ }
+ sort.Slice(addrs, func(i, j int) bool { return addrs[i] < addrs[j] })
+
+ const expand = 500 // How much to expand range to pick up nearby addresses.
+ var result []addressRange
+ for i, n := 0, len(addrs); i < n; {
+ begin, end := addrs[i], addrs[i]
+ sum := flat[begin]
+ i++
+
+ info := addrMap[begin]
+ m := info.loc.Mapping
+ obj := info.obj // Non-nil because of the partitioning done above.
+
+ // Find following addresses that are close enough to addrs[i].
+ for i < n && addrs[i] <= end+2*expand && addrs[i] < m.Limit {
+ // When we expand ranges by "expand" on either side, the ranges
+ // for addrs[i] and addrs[i-1] will merge.
+ end = addrs[i]
+ sum += flat[end]
+ i++
+ }
+ if m.Start-begin >= expand {
+ begin -= expand
+ } else {
+ begin = m.Start
+ }
+ if m.Limit-end >= expand {
+ end += expand
+ } else {
+ end = m.Limit
+ }
+
+ result = append(result, addressRange{begin, end, obj, m, sum})
+ }
+ return result, unprocessed
+}
+
+func (sp *sourcePrinter) initSamples(flat, cum map[uint64]int64) {
+ for addr, inst := range sp.insts {
+ // Move all samples that were assigned to the middle of an instruction to the
+ // beginning of that instruction. This takes care of samples that were recorded
+ // against pc+1.
+ instEnd := addr + uint64(inst.length)
+ for p := addr; p < instEnd; p++ {
+ inst.flat += flat[p]
+ inst.cum += cum[p]
+ }
+ sp.insts[addr] = inst
+ }
+}
+
+func (sp *sourcePrinter) print(w io.Writer, maxFiles int, rpt *Report) {
+ // Finalize per-file counts.
+ for _, file := range sp.files {
+ seen := map[uint64]bool{}
+ for _, line := range file.lines {
+ for _, x := range line {
+ if seen[x.addr] {
+ // Same address can be displayed multiple times in a file
+ // (e.g., if we show multiple inlined functions).
+ // Avoid double-counting samples in this case.
+ continue
+ }
+ seen[x.addr] = true
+ inst := sp.insts[x.addr]
+ file.cum += inst.cum
+ file.flat += inst.flat
+ }
+ }
+ }
+
+ // Get sorted list of files to print.
+ var files []*sourceFile
+ for _, f := range sp.files {
+ files = append(files, f)
+ }
+ order := func(i, j int) bool { return files[i].flat > files[j].flat }
+ if maxFiles < 0 {
+ // Order by name for compatibility with old code.
+ order = func(i, j int) bool { return files[i].fname < files[j].fname }
+ maxFiles = len(files)
+ }
+ sort.Slice(files, order)
+ for i, f := range files {
+ if i < maxFiles {
+ sp.printFile(w, f, rpt)
+ }
+ }
+}
+
// printFile prints the annotated listing for one source file: for every
// function range in the file it prints a header, then each source line with
// its sample counts and the disassembly attributed to that line.
func (sp *sourcePrinter) printFile(w io.Writer, f *sourceFile, rpt *Report) {
	for _, fn := range sp.functions(f) {
		// Skip functions that collected no samples at all.
		if fn.cum == 0 {
			continue
		}
		printFunctionHeader(w, fn.name, f.fname, fn.flat, fn.cum, rpt)
		var asm []assemblyInstruction
		for l := fn.begin; l < fn.end; l++ {
			lineContents, ok := sp.reader.line(f.fname, l)
			if !ok {
				if len(f.lines[l]) == 0 {
					// Outside of range of valid lines and nothing to print.
					continue
				}
				if l == 0 {
					// Line number 0 shows up if line number is not known.
					lineContents = "<instructions with unknown line numbers>"
				} else {
					// Past end of file, but have data to print.
					lineContents = "???"
				}
			}

			// Make list of assembly instructions.
			asm = asm[:0] // reuse backing array across lines
			var flatSum, cumSum int64
			var lastAddr uint64
			for _, inst := range f.lines[l] {
				addr := inst.addr
				x := sp.insts[addr]
				flatSum += x.flat
				cumSum += x.cum
				// A gap after the previous instruction starts a new block.
				startsBlock := (addr != lastAddr+uint64(sp.insts[lastAddr].length))
				lastAddr = addr

				// divisors already applied, so leave flatDiv,cumDiv as 0
				asm = append(asm, assemblyInstruction{
					address:     x.objAddr,
					instruction: x.disasm,
					function:    fn.name,
					file:        x.file,
					line:        x.line,
					flat:        x.flat,
					cum:         x.cum,
					startsBlock: startsBlock,
					inlineCalls: inst.stack,
				})
			}

			printFunctionSourceLine(w, l, flatSum, cumSum, lineContents, asm, sp.reader, rpt)
		}
		printFunctionClosing(w)
	}
}
+
// functions splits apart the lines to show in a file into a list of per-function ranges.
// Adjacent sampled lines belonging to the same function are merged when they
// are close together, and each resulting range is widened slightly to show
// surrounding context.
func (sp *sourcePrinter) functions(f *sourceFile) []sourceFunction {
	var funcs []sourceFunction

	// Get interesting lines in sorted order.
	lines := make([]int, 0, len(f.lines))
	for l := range f.lines {
		lines = append(lines, l)
	}
	sort.Ints(lines)

	// Merge adjacent lines that are in same function and not too far apart.
	const mergeLimit = 20
	for _, l := range lines {
		name := f.funcName[l]
		if pretty, ok := sp.prettyNames[name]; ok {
			// Use demangled name if available.
			name = pretty
		}

		fn := sourceFunction{name: name, begin: l, end: l + 1}
		for _, x := range f.lines[l] {
			inst := sp.insts[x.addr]
			fn.flat += inst.flat
			fn.cum += inst.cum
		}

		// See if we should merge into preceding function.
		if len(funcs) > 0 {
			last := funcs[len(funcs)-1]
			if l-last.end < mergeLimit && last.name == name {
				last.end = l + 1
				last.flat += fn.flat
				last.cum += fn.cum
				funcs[len(funcs)-1] = last
				continue
			}
		}

		// Add new function.
		funcs = append(funcs, fn)
	}

	// Expand function boundaries to show neighborhood.
	const expand = 5
	for i, f := range funcs {
		if i == 0 {
			// Extend backwards, stopping at line number 1, but do not disturb 0
			// since that is a special line number that can show up when addr2line
			// cannot determine the real line number.
			if f.begin > expand {
				f.begin -= expand
			} else if f.begin > 1 {
				f.begin = 1
			}
		} else {
			// Find gap from predecessor and divide between predecessor and f.
			halfGap := (f.begin - funcs[i-1].end) / 2
			if halfGap > expand {
				halfGap = expand
			}
			funcs[i-1].end += halfGap
			f.begin -= halfGap
		}
		funcs[i] = f
	}

	// Also extend the ending point of the last function.
	if len(funcs) > 0 {
		funcs[len(funcs)-1].end += expand
	}

	return funcs
}
+
+// objectFile return the object for the specified mapping, opening it if necessary.
+// It returns nil on error.
+func (sp *sourcePrinter) objectFile(m *profile.Mapping) plugin.ObjFile {
+ if m == nil {
+ return nil
+ }
+ if object, ok := sp.objects[m.File]; ok {
+ return object // May be nil if we detected an error earlier.
+ }
+ object, err := sp.objectTool.Open(m.File, m.Start, m.Limit, m.Offset, m.KernelRelocationSymbol)
+ if err != nil {
+ object = nil
+ }
+ sp.objects[m.File] = object // Cache even on error.
+ return object
+}
+
// printHeader prints the page header for a weblist report.
// It emits the HTML prologue (with embedded CSS and JS) followed by a
// legend div holding the HTML-escaped profile labels and the total.
func printHeader(w io.Writer, rpt *Report) {
	fmt.Fprintln(w, `
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>Pprof listing</title>`)
	fmt.Fprintln(w, weblistPageCSS)
	fmt.Fprintln(w, weblistPageScript)
	fmt.Fprint(w, "</head>\n<body>\n\n")

	// Labels may contain arbitrary profile text; escape before embedding.
	var labels []string
	for _, l := range ProfileLabels(rpt) {
		labels = append(labels, template.HTMLEscapeString(l))
	}

	fmt.Fprintf(w, `<div class="legend">%s<br>Total: %s</div>`,
		strings.Join(labels, "<br>\n"),
		rpt.formatValue(rpt.total),
	)
}
+
// printFunctionHeader prints a function header for a weblist report.
// name and path are HTML-escaped; the <pre> opens the collapsible
// assembly region toggled by pprof_toggle_asm.
func printFunctionHeader(w io.Writer, name, path string, flatSum, cumSum int64, rpt *Report) {
	fmt.Fprintf(w, `<h2>%s</h2><p class="filename">%s</p>
<pre onClick="pprof_toggle_asm(event)">
 Total: %10s %10s (flat, cum) %s
`,
		template.HTMLEscapeString(name), template.HTMLEscapeString(path),
		rpt.formatValue(flatSum), rpt.formatValue(cumSum),
		measurement.Percentage(cumSum, rpt.total))
}
+
// printFunctionSourceLine prints a source line and the corresponding assembly.
// Lines with no assembly get class "nop"; lines whose assembly carries real
// instructions or inline info get class "livesrc" and an expandable nested
// block, otherwise "deadsrc".
func printFunctionSourceLine(w io.Writer, lineNo int, flat, cum int64, lineContents string,
	assembly []assemblyInstruction, reader *sourceReader, rpt *Report) {
	if len(assembly) == 0 {
		fmt.Fprintf(w,
			"<span class=line> %6d</span> <span class=nop> %10s %10s %8s %s </span>\n",
			lineNo,
			valueOrDot(flat, rpt), valueOrDot(cum, rpt),
			"", template.HTMLEscapeString(lineContents))
		return
	}

	// Decide whether any instruction justifies a nested (expandable) view.
	nestedInfo := false
	cl := "deadsrc"
	for _, an := range assembly {
		if len(an.inlineCalls) > 0 || an.instruction != synthAsm {
			nestedInfo = true
			cl = "livesrc"
		}
	}

	fmt.Fprintf(w,
		"<span class=line> %6d</span> <span class=%s> %10s %10s %8s %s </span>",
		lineNo, cl,
		valueOrDot(flat, rpt), valueOrDot(cum, rpt),
		"", template.HTMLEscapeString(lineContents))
	if nestedInfo {
		srcIndent := indentation(lineContents)
		printNested(w, srcIndent, assembly, reader, rpt)
	}
	fmt.Fprintln(w)
}
+
+// printNested prints the assembly instructions (and any inlined-call source
+// context) attached to one source line, indented relative to srcIndent.
+// The output is wrapped in a <span class=asm> that the page script shows
+// and hides on click.
+func printNested(w io.Writer, srcIndent int, assembly []assemblyInstruction, reader *sourceReader, rpt *Report) {
+ fmt.Fprint(w, "<span class=asm>")
+ // curCalls tracks the inline-call chain of the previous instruction so
+ // unchanged context lines are not repeated.
+ var curCalls []callID
+ for i, an := range assembly {
+ if an.startsBlock && i != 0 {
+ // Insert a separator between discontiguous blocks.
+ fmt.Fprintf(w, " %8s %28s\n", "", "â‹®")
+ }
+
+ var fileline string
+ if an.file != "" {
+ fileline = fmt.Sprintf("%s:%d", template.HTMLEscapeString(filepath.Base(an.file)), an.line)
+ }
+ flat, cum := an.flat, an.cum
+
+ // Print inlined call context.
+ for j, c := range an.inlineCalls {
+ if j < len(curCalls) && curCalls[j] == c {
+ // Skip if same as previous instruction.
+ continue
+ }
+ curCalls = nil
+ fline, ok := reader.line(c.file, c.line)
+ if !ok {
+ fline = ""
+ }
+ // Indent 4 extra columns per inline nesting level.
+ text := strings.Repeat(" ", srcIndent+4+4*j) + strings.TrimSpace(fline)
+ fmt.Fprintf(w, " %8s %10s %10s %8s <span class=inlinesrc>%s</span> <span class=unimportant>%s:%d</span>\n",
+ "", "", "", "",
+ template.HTMLEscapeString(rightPad(text, 80)),
+ template.HTMLEscapeString(filepath.Base(c.file)), c.line)
+ }
+ curCalls = an.inlineCalls
+ if an.instruction == synthAsm {
+ // Synthesized placeholder; nothing to print for it.
+ continue
+ }
+ text := strings.Repeat(" ", srcIndent+4+4*len(curCalls)) + an.instruction
+ fmt.Fprintf(w, " %8s %10s %10s %8x: %s <span class=unimportant>%s</span>\n",
+ "", valueOrDot(flat, rpt), valueOrDot(cum, rpt), an.address,
+ template.HTMLEscapeString(rightPad(text, 80)),
+ // fileline should not be escaped since it was formed by appending
+ // line number (just digits) to an escaped file name. Escaping here
+ // would cause double-escaping of file name.
+ fileline)
+ }
+ fmt.Fprint(w, "</span>")
+}
+
+// printFunctionClosing prints the end of a function in a weblist report,
+// closing the <pre> block opened by printFunctionHeader.
+func printFunctionClosing(w io.Writer) {
+	fmt.Fprint(w, "</pre>\n")
+}
+
+// printPageClosing prints the end of the page in a weblist report by
+// emitting the weblistPageClosing footer (</body></html>).
+func printPageClosing(w io.Writer) {
+ fmt.Fprintln(w, weblistPageClosing)
+}
+
+// getSourceFromFile collects the sources of a function from a source
+// file and annotates it with the samples in fns. Returns the sources
+// as nodes, using the info.name field to hold the source code.
+// fns must be non-empty: fns[0] seeds the initial line range.
+func getSourceFromFile(file string, reader *sourceReader, fns graph.Nodes, start, end int) (graph.Nodes, string, error) {
+ lineNodes := make(map[int]graph.Nodes)
+
+ // Collect source coordinates from profile.
+ const margin = 5 // Lines before first/after last sample.
+ if start == 0 {
+ if fns[0].Info.StartLine != 0 {
+ start = fns[0].Info.StartLine
+ } else {
+ start = fns[0].Info.Lineno - margin
+ }
+ } else {
+ start -= margin
+ }
+ if end == 0 {
+ end = fns[0].Info.Lineno
+ }
+ end += margin
+ // Widen [start, end] so every sampled line plus margin is covered, and
+ // group nodes by source line for the per-line flat/cum sums below.
+ for _, n := range fns {
+ lineno := n.Info.Lineno
+ nodeStart := n.Info.StartLine
+ if nodeStart == 0 {
+ nodeStart = lineno - margin
+ }
+ nodeEnd := lineno + margin
+ if nodeStart < start {
+ start = nodeStart
+ } else if nodeEnd > end {
+ end = nodeEnd
+ }
+ lineNodes[lineno] = append(lineNodes[lineno], n)
+ }
+ if start < 1 {
+ start = 1
+ }
+
+ // Build one node per source line; stop early when the file runs out of
+ // lines (reader.line returns false past EOF).
+ var src graph.Nodes
+ for lineno := start; lineno <= end; lineno++ {
+ line, ok := reader.line(file, lineno)
+ if !ok {
+ break
+ }
+ flat, cum := lineNodes[lineno].Sum()
+ src = append(src, &graph.Node{
+ Info: graph.NodeInfo{
+ Name: strings.TrimRight(line, "\n"),
+ Lineno: lineno,
+ },
+ Flat: flat,
+ Cum: cum,
+ })
+ }
+ // Surface any read error recorded while fetching lines.
+ if err := reader.fileError(file); err != nil {
+ return nil, file, err
+ }
+ return src, file, nil
+}
+
+// sourceReader provides access to source code with caching of file contents.
+type sourceReader struct {
+ // searchPath is a filepath.ListSeparator-separated list of directories where
+ // source files should be searched.
+ searchPath string
+
+ // trimPath is a filepath.ListSeparator-separated list of paths to trim.
+ trimPath string
+
+ // files maps from path name to a list of lines.
+ // files[*][0] is unused since line numbering starts at 1.
+ files map[string][]string
+
+ // errors collects errors encountered per file. These errors are
+ // consulted (via fileError) before results are returned from this module.
+ errors map[string]error
+}
+
+// newSourceReader returns a sourceReader that resolves file names against
+// searchPath and strips trimPath prefixes, with empty content and error
+// caches ready for use.
+func newSourceReader(searchPath, trimPath string) *sourceReader {
+	// Keyed literal: the original positional literal silently breaks if
+	// fields are added to or reordered in sourceReader.
+	return &sourceReader{
+		searchPath: searchPath,
+		trimPath:   trimPath,
+		files:      make(map[string][]string),
+		errors:     make(map[string]error),
+	}
+}
+
+// fileError returns the error recorded for path by a previous line() call,
+// or nil if the file was read successfully (or never read).
+func (reader *sourceReader) fileError(path string) error {
+ return reader.errors[path]
+}
+
+// line returns the line numbered "lineno" in path, or _,false if lineno is
+// out of range. File contents are read and cached on the first access to
+// each path; open/read failures are recorded in reader.errors for later
+// retrieval via fileError.
+func (reader *sourceReader) line(path string, lineno int) (string, bool) {
+	lines, ok := reader.files[path]
+	if !ok {
+		// Read and cache file contents.
+		lines = []string{""} // Skip 0th line
+		f, err := openSourceFile(path, reader.searchPath, reader.trimPath)
+		if err != nil {
+			reader.errors[path] = err
+		} else {
+			s := bufio.NewScanner(f)
+			for s.Scan() {
+				lines = append(lines, s.Text())
+			}
+			f.Close()
+			if s.Err() != nil {
+				// Record the scanner's own error. The previous code stored
+				// the Open error ("err"), which is always nil on this branch,
+				// so scan failures were silently dropped.
+				reader.errors[path] = s.Err()
+			}
+		}
+		reader.files[path] = lines
+	}
+	if lineno <= 0 || lineno >= len(lines) {
+		return "", false
+	}
+	return lines[lineno], true
+}
+
+// openSourceFile opens a source file from a name encoded in a profile. File
+// names in a profile can be relative paths, so search for them in each of
+// the paths in searchPath and their parents. In case the profile contains
+// absolute paths, additional paths may be configured to trim from the source
+// paths in the profile. This effectively turns the path into a relative
+// path, which is then searched for using searchPath as usual.
+func openSourceFile(path, searchPath, trim string) (*os.File, error) {
+ path = trimPath(path, trim, searchPath)
+ // If file is still absolute, require file to exist.
+ if filepath.IsAbs(path) {
+ f, err := os.Open(path)
+ return f, err
+ }
+ // Scan each component of the path.
+ for _, dir := range filepath.SplitList(searchPath) {
+ // Search up for every parent of each possible path.
+ for {
+ filename := filepath.Join(dir, path)
+ if f, err := os.Open(filename); err == nil {
+ return f, nil
+ }
+ parent := filepath.Dir(dir)
+ if parent == dir {
+ // Reached the filesystem root; stop walking up.
+ break
+ }
+ dir = parent
+ }
+ }
+
+ return nil, fmt.Errorf("could not find file %s on path %s", path, searchPath)
+}
+
+// trimPath cleans up a path by removing prefixes that are commonly
+// found on profiles plus configured prefixes.
+// TODO(aalexand): Consider optimizing out the redundant work done in this
+// function if it proves to matter.
+func trimPath(path, trimPath, searchPath string) string {
+ // Keep path variable intact as it's used below to form the return value.
+ sPath, searchPath := filepath.ToSlash(path), filepath.ToSlash(searchPath)
+ if trimPath == "" {
+ // If the trim path is not configured, try to guess it heuristically:
+ // search for basename of each search path in the original path and, if
+ // found, strip everything up to and including the basename. So, for
+ // example, given original path "/some/remote/path/my-project/foo/bar.c"
+ // and search path "/my/local/path/my-project" the heuristic will return
+ // "foo/bar.c", which openSourceFile then locates under the search path.
+ for _, dir := range filepath.SplitList(searchPath) {
+ want := "/" + filepath.Base(dir) + "/"
+ if found := strings.Index(sPath, want); found != -1 {
+ // Indexing path with an sPath offset is safe: ToSlash only
+ // replaces separator characters, so both have equal length.
+ return path[found+len(want):]
+ }
+ }
+ }
+ // Trim configured trim prefixes plus the /proc/self/cwd prefixes, each
+ // normalized to end with exactly one "/".
+ trimPaths := append(filepath.SplitList(filepath.ToSlash(trimPath)), "/proc/self/cwd/./", "/proc/self/cwd/")
+ for _, trimPath := range trimPaths { // Note: loop variable shadows the trimPath parameter.
+ if !strings.HasSuffix(trimPath, "/") {
+ trimPath += "/"
+ }
+ if strings.HasPrefix(sPath, trimPath) {
+ return path[len(trimPath):]
+ }
+ }
+ return path
+}
+
+// indentation reports the display column at which the first non-blank
+// character of line occurs, expanding tabs to 8-column tab stops.
+func indentation(line string) int {
+	col := 0
+	for _, r := range line {
+		switch r {
+		case ' ':
+			col++
+		case '\t':
+			// Advance to the next multiple-of-8 tab stop.
+			col = (col/8 + 1) * 8
+		default:
+			return col
+		}
+	}
+	return col
+}
+
+// rightPad returns s with tabs expanded to 8-aligned tab stops and spaces
+// appended on the right so the result is at least n display columns wide.
+// Tabs are converted as the string is scanned, so the padding is correct
+// regardless of what prefix is later placed before the result.
+func rightPad(s string, n int) string {
+	var b strings.Builder
+	col := 0
+	for _, r := range s {
+		col++
+		if r != '\t' {
+			b.WriteRune(r)
+			continue
+		}
+		// A tab becomes one space plus enough spaces to reach the next
+		// 8-aligned tab stop.
+		b.WriteByte(' ')
+		for ; col%8 != 0; col++ {
+			b.WriteByte(' ')
+		}
+	}
+	for ; col < n; col++ {
+		b.WriteByte(' ')
+	}
+	return b.String()
+}
+
+// canonicalizeFileName strips the "/proc/self/cwd/" and "./" prefixes that
+// commonly appear in profile file names and returns the cleaned path.
+func canonicalizeFileName(fname string) string {
+	trimmed := strings.TrimPrefix(fname, "/proc/self/cwd/")
+	trimmed = strings.TrimPrefix(trimmed, "./")
+	return filepath.Clean(trimmed)
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/report/source_html.go b/src/cmd/vendor/github.com/google/pprof/internal/report/source_html.go
new file mode 100644
index 0000000..851693f
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/report/source_html.go
@@ -0,0 +1,75 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package report
+
+import (
+ "html/template"
+)
+
+// AddSourceTemplates adds templates used by PrintWebList to t:
+// "weblistcss" (the page CSS) and "weblistjs" (the asm-toggle script).
+// It panics (via template.Must) if the templates fail to parse.
+func AddSourceTemplates(t *template.Template) {
+ template.Must(t.Parse(`{{define "weblistcss"}}` + weblistPageCSS + `{{end}}`))
+ template.Must(t.Parse(`{{define "weblistjs"}}` + weblistPageScript + `{{end}}`))
+}
+
+// weblistPageCSS styles the weblist page. Note that .asm sections start
+// hidden (display: none) and are shown by pprof_toggle_asm below.
+const weblistPageCSS = `<style type="text/css">
+body #content{
+font-family: sans-serif;
+}
+h1 {
+ font-size: 1.5em;
+}
+.legend {
+ font-size: 1.25em;
+}
+.line, .nop, .unimportant {
+ color: #aaaaaa;
+}
+.inlinesrc {
+ color: #000066;
+}
+.livesrc {
+cursor: pointer;
+}
+.livesrc:hover {
+background-color: #eeeeee;
+}
+.asm {
+color: #008800;
+display: none;
+}
+</style>`
+
+// weblistPageScript defines pprof_toggle_asm, the click handler attached to
+// each function's <pre> block that shows/hides the adjacent .asm span.
+const weblistPageScript = `<script type="text/javascript">
+function pprof_toggle_asm(e) {
+ var target;
+ if (!e) e = window.event;
+ if (e.target) target = e.target;
+ else if (e.srcElement) target = e.srcElement;
+
+ if (target) {
+ var asm = target.nextSibling;
+ if (asm && asm.className == "asm") {
+ asm.style.display = (asm.style.display == "block" ? "" : "block");
+ e.preventDefault();
+ return false;
+ }
+ }
+}
+</script>`
+
+// weblistPageClosing closes the elements opened by printHeader.
+const weblistPageClosing = `
+</body>
+</html>`
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/report/stacks.go b/src/cmd/vendor/github.com/google/pprof/internal/report/stacks.go
new file mode 100644
index 0000000..7db51bc
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/report/stacks.go
@@ -0,0 +1,194 @@
+// Copyright 2022 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package report
+
+import (
+ "crypto/sha256"
+ "encoding/binary"
+ "fmt"
+ "regexp"
+
+ "github.com/google/pprof/internal/measurement"
+ "github.com/google/pprof/profile"
+)
+
+// StackSet holds a set of stacks corresponding to a profile.
+//
+// Slices in StackSet and the types it contains are always non-nil,
+// which makes Javascript code that uses the JSON encoding less error-prone.
+type StackSet struct {
+ Total int64 // Total value of the profile.
+ Scale float64 // Multiplier to generate displayed value.
+ Type string // Profile type. E.g., "cpu".
+ Unit string // One of "B", "s", "GCU", or "" (if unknown).
+ Stacks []Stack // List of stored stacks.
+ Sources []StackSource // Mapping from source index to info.
+}
+
+// Stack holds a single stack instance.
+type Stack struct {
+ Value int64 // Total value for all samples of this stack.
+ Sources []int // Indices in StackSet.Sources (callers before callees).
+}
+
+// StackSource holds function/location info for a stack entry.
+type StackSource struct {
+ FullName string
+ FileName string
+ UniqueName string // Disambiguates functions with same names
+ Inlined bool // If true this source was inlined into its caller
+
+ // Alternative names to display (with decreasing lengths) to make text fit.
+ // Guaranteed to be non-empty.
+ Display []string
+
+ // Regular expression (anchored) that matches exactly FullName.
+ RE string
+
+ // Places holds the list of stack slots where this source occurs.
+ // In particular, if [a,b] is an element in Places,
+ // StackSet.Stacks[a].Sources[b] points to this source.
+ //
+ // No stack will be referenced twice in the Places slice for a given
+ // StackSource. In case of recursion, Places will contain the outer-most
+ // entry in the recursive stack. E.g., if stack S has source X at positions
+ // 4,6,9,10, the Places entry for X will contain [S,4].
+ Places []StackSlot
+
+ // Combined count of stacks where this source is the leaf.
+ Self int64
+
+ // Color number to use for this source.
+ // Colors with higher numbers than supported may be treated as zero.
+ Color int
+}
+
+// StackSlot identifies a single position within a stored stack:
+// StackSet.Stacks[Stack].Sources[Pos].
+type StackSlot struct {
+ Stack int // Index in StackSet.Stacks
+ Pos int // Index in Stack.Sources
+}
+
+// Stacks returns a StackSet for the profile in rpt.
+func (rpt *Report) Stacks() StackSet {
+ // Get scale for converting to default unit of the right type.
+ scale, unit := measurement.Scale(1, rpt.options.SampleUnit, "default")
+ if unit == "default" {
+ // No better unit was found; leave values unlabeled.
+ unit = ""
+ }
+ if rpt.options.Ratio > 0 {
+ // Fold the configured sampling ratio into the display multiplier.
+ scale *= rpt.options.Ratio
+ }
+ s := &StackSet{
+ Total: rpt.total,
+ Scale: scale,
+ Type: rpt.options.SampleType,
+ Unit: unit,
+ Stacks: []Stack{}, // Ensure non-nil
+ Sources: []StackSource{}, // Ensure non-nil
+ }
+ s.makeInitialStacks(rpt)
+ s.fillPlaces()
+ s.assignColors()
+ return *s
+}
+
+// makeInitialStacks converts every profile sample into a Stack. Each stack
+// begins with a synthesized "root" source (index 0); locations and their
+// inline frames are appended in reverse so callers precede callees. One
+// StackSource is created per distinct (line, inlined) pair, and each
+// stack's value is added to the Self count of its leaf source.
+func (s *StackSet) makeInitialStacks(rpt *Report) {
+ type key struct {
+ line profile.Line
+ inlined bool
+ }
+ srcs := map[key]int{} // Sources identified so far.
+ seenFunctions := map[string]bool{}
+ unknownIndex := 1 // Counter used to name locations with no function.
+ getSrc := func(line profile.Line, inlined bool) int {
+ k := key{line, inlined}
+ if i, ok := srcs[k]; ok {
+ return i
+ }
+ x := StackSource{Places: []StackSlot{}} // Ensure Places is non-nil
+ if fn := line.Function; fn != nil {
+ x.FullName = fn.Name
+ x.FileName = fn.Filename
+ if !seenFunctions[fn.Name] {
+ x.UniqueName = fn.Name
+ seenFunctions[fn.Name] = true
+ } else {
+ // Assign a different name so pivoting picks this function.
+ x.UniqueName = fmt.Sprint(fn.Name, "#", fn.ID)
+ }
+ } else {
+ x.FullName = fmt.Sprintf("?%d?", unknownIndex)
+ x.UniqueName = x.FullName
+ unknownIndex++
+ }
+ x.Inlined = inlined
+ x.RE = "^" + regexp.QuoteMeta(x.UniqueName) + "$"
+ x.Display = shortNameList(x.FullName)
+ s.Sources = append(s.Sources, x)
+ srcs[k] = len(s.Sources) - 1
+ return len(s.Sources) - 1
+ }
+
+ // Synthesized root location that will be placed at the beginning of each stack.
+ s.Sources = []StackSource{{
+ FullName: "root",
+ Display: []string{"root"},
+ Places: []StackSlot{},
+ }}
+
+ for _, sample := range rpt.prof.Sample {
+ value := rpt.options.SampleValue(sample.Value)
+ stack := Stack{Value: value, Sources: []int{0}} // Start with the root
+
+ // Note: we need to reverse the order in the produced stack.
+ for i := len(sample.Location) - 1; i >= 0; i-- {
+ loc := sample.Location[i]
+ for j := len(loc.Line) - 1; j >= 0; j-- {
+ line := loc.Line[j]
+ // All but the last line of a location are inlined frames.
+ inlined := (j != len(loc.Line)-1)
+ stack.Sources = append(stack.Sources, getSrc(line, inlined))
+ }
+ }
+
+ // Credit the stack's value to the leaf source's Self count.
+ leaf := stack.Sources[len(stack.Sources)-1]
+ s.Sources[leaf].Self += value
+ s.Stacks = append(s.Stacks, stack)
+ }
+}
+
+// fillPlaces records, for every source, each (stack, position) pair at
+// which it occurs. Within a single stack only the outermost occurrence of
+// a source is recorded, so recursion does not add duplicate Places entries.
+func (s *StackSet) fillPlaces() {
+	for stackIdx, st := range s.Stacks {
+		seen := make(map[int]bool, len(st.Sources))
+		for pos, srcIdx := range st.Sources {
+			if seen[srcIdx] {
+				continue // Keep only the outermost occurrence per stack.
+			}
+			seen[srcIdx] = true
+			slot := StackSlot{Stack: stackIdx, Pos: pos}
+			s.Sources[srcIdx].Places = append(s.Sources[srcIdx].Places, slot)
+		}
+	}
+}
+
+// assignColors gives each source a color index derived from a hash of its
+// package name, so sources in the same package share a color and the
+// assignment is stable across runs.
+func (s *StackSet) assignColors() {
+ // Assign different color indices to different packages.
+ const numColors = 1048576
+ for i, src := range s.Sources {
+ pkg := packageName(src.FullName)
+ h := sha256.Sum256([]byte(pkg))
+ index := binary.LittleEndian.Uint32(h[:])
+ s.Sources[i].Color = int(index % numColors)
+ }
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/report/synth.go b/src/cmd/vendor/github.com/google/pprof/internal/report/synth.go
new file mode 100644
index 0000000..7a35bbc
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/report/synth.go
@@ -0,0 +1,39 @@
+package report
+
+import (
+ "github.com/google/pprof/profile"
+)
+
+// synthCode assigns addresses to locations without an address.
+type synthCode struct {
+ next uint64 // Next unused synthetic address to hand out.
+ addr map[*profile.Location]uint64 // Synthesized address assigned to a location; allocated lazily.
+}
+
+// newSynthCode returns a synthCode whose synthesized addresses start at or
+// past the highest Limit of the given mappings, so they cannot collide with
+// a real mapped address.
+func newSynthCode(mappings []*profile.Mapping) *synthCode {
+	next := uint64(1)
+	for _, m := range mappings {
+		if next < m.Limit {
+			next = m.Limit
+		}
+	}
+	return &synthCode{next: next}
+}
+
+// address returns the synthetic address for loc, creating one if needed.
+// It panics if loc already has a real address: callers must only pass
+// locations with Address == 0.
+func (s *synthCode) address(loc *profile.Location) uint64 {
+ if loc.Address != 0 {
+ panic("can only synthesize addresses for locations without an address")
+ }
+ if addr, ok := s.addr[loc]; ok {
+ return addr
+ }
+ if s.addr == nil {
+ // Lazily allocate the map on first use.
+ s.addr = map[*profile.Location]uint64{}
+ }
+ addr := s.next
+ s.next++
+ s.addr[loc] = addr
+ return addr
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/symbolizer/symbolizer.go b/src/cmd/vendor/github.com/google/pprof/internal/symbolizer/symbolizer.go
new file mode 100644
index 0000000..c3f6cc6
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/symbolizer/symbolizer.go
@@ -0,0 +1,379 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package symbolizer provides a routine to populate a profile with
+// symbol, file and line number information. It relies on the
+// addr2liner and demangle packages to do the actual work.
+package symbolizer
+
+import (
+ "fmt"
+ "io"
+ "net/http"
+ "net/url"
+ "path/filepath"
+ "strings"
+
+ "github.com/google/pprof/internal/binutils"
+ "github.com/google/pprof/internal/plugin"
+ "github.com/google/pprof/internal/symbolz"
+ "github.com/google/pprof/profile"
+ "github.com/ianlancetaylor/demangle"
+)
+
+// Symbolizer implements the plugin.Symbolize interface.
+type Symbolizer struct {
+ Obj plugin.ObjTool // Opens and symbolizes local binaries.
+ UI plugin.UI // Destination for error/status messages.
+ Transport http.RoundTripper // Used for symbolz HTTP requests.
+}
+
+// Test taps for dependency injection: tests replace these to stub out the
+// symbolz, local-binutils, and demangling stages of Symbolize.
+var symbolzSymbolize = symbolz.Symbolize
+var localSymbolize = doLocalSymbolize
+var demangleFunction = Demangle
+
+// Symbolize attempts to symbolize profile p. First uses binutils on
+// local binaries; if the source is a URL it attempts to get any
+// missed entries using symbolz.
+//
+// mode is a colon-separated option list: one of none/no/local/fastlocal/
+// remote selects the symbolization source, and the optional "force" and
+// "demangle=<mode>" entries modify it. Unrecognized options are reported
+// to the UI and otherwise ignored.
+func (s *Symbolizer) Symbolize(mode string, sources plugin.MappingSources, p *profile.Profile) error {
+	remote, local, fast, force, demanglerMode := true, true, false, false, ""
+	for _, o := range strings.Split(strings.ToLower(mode), ":") {
+		switch o {
+		case "":
+			continue
+		case "none", "no":
+			return nil
+		case "local":
+			remote, local = false, true
+		case "fastlocal":
+			remote, local, fast = false, true, true
+		case "remote":
+			remote, local = true, false
+		case "force":
+			force = true
+		default:
+			switch d := strings.TrimPrefix(o, "demangle="); d {
+			case "full", "none", "templates":
+				demanglerMode = d
+				force = true
+				continue
+			case "default":
+				continue
+			}
+			s.UI.PrintErr("ignoring unrecognized symbolization option: " + mode)
+			// Fixed: the usage string was missing the closing "]" after the
+			// demangle alternatives.
+			s.UI.PrintErr("expecting -symbolize=[local|fastlocal|remote|none][:force][:demangle=[none|full|templates|default]]")
+		}
+	}
+
+	var err error
+	if local {
+		// Symbolize locally using binutils.
+		if err = localSymbolize(p, fast, force, s.Obj, s.UI); err != nil {
+			s.UI.PrintErr("local symbolization: " + err.Error())
+		}
+	}
+	if remote {
+		post := func(source, post string) ([]byte, error) {
+			return postURL(source, post, s.Transport)
+		}
+		if err = symbolzSymbolize(p, force, sources, post, s.UI); err != nil {
+			return err // Ran out of options.
+		}
+	}
+
+	demangleFunction(p, force, demanglerMode)
+	return nil
+}
+
+// postURL issues a POST to a URL over HTTP and returns the response body.
+// Non-200 responses are converted to an error (see statusCodeError); the
+// response body is always closed before returning.
+func postURL(source, post string, tr http.RoundTripper) ([]byte, error) {
+ client := &http.Client{
+ Transport: tr,
+ }
+ resp, err := client.Post(source, "application/octet-stream", strings.NewReader(post))
+ if err != nil {
+ return nil, fmt.Errorf("http post %s: %v", source, err)
+ }
+ defer resp.Body.Close()
+ if resp.StatusCode != http.StatusOK {
+ return nil, fmt.Errorf("http post %s: %v", source, statusCodeError(resp))
+ }
+ return io.ReadAll(resp.Body)
+}
+
+// statusCodeError converts a non-OK HTTP response into an error. When the
+// server is a Go pprof endpoint that reports failures as plain text (the
+// X-Go-Pprof header is set), the response body is included in the message.
+func statusCodeError(resp *http.Response) error {
+	fromPprof := resp.Header.Get("X-Go-Pprof") != ""
+	plainText := strings.Contains(resp.Header.Get("Content-Type"), "text/plain")
+	if fromPprof && plainText {
+		// error is from pprof endpoint
+		if body, err := io.ReadAll(resp.Body); err == nil {
+			return fmt.Errorf("server response: %s - %s", resp.Status, body)
+		}
+	}
+	return fmt.Errorf("server response: %s", resp.Status)
+}
+
+// doLocalSymbolize adds symbol and line number information to all locations
+// in a profile by querying the object tool for each mapped binary. fast
+// enables faster, less precise symbolization when the tool is binutils;
+// force re-symbolizes mappings that already carry symbol information.
+func doLocalSymbolize(prof *profile.Profile, fast, force bool, obj plugin.ObjTool, ui plugin.UI) error {
+	if fast {
+		if bu, ok := obj.(*binutils.Binutils); ok {
+			bu.SetFastSymbolization(true)
+		}
+	}
+
+	mt, err := newMapping(prof, obj, ui, force)
+	if err != nil {
+		return err
+	}
+	defer mt.close()
+
+	// Deduplicate functions: identical (name, system name, file) triples
+	// share a single profile.Function entry.
+	functions := make(map[profile.Function]*profile.Function)
+	for _, l := range mt.prof.Location {
+		m := l.Mapping
+		segment := mt.segments[m]
+		if segment == nil {
+			// Nothing to do.
+			continue
+		}
+
+		stack, err := segment.SourceLine(l.Address)
+		if err != nil || len(stack) == 0 {
+			// No answers from addr2line.
+			continue
+		}
+
+		l.Line = make([]profile.Line, len(stack))
+		l.IsFolded = false
+		for i, frame := range stack {
+			if frame.Func != "" {
+				m.HasFunctions = true
+			}
+			if frame.File != "" {
+				m.HasFilenames = true
+			}
+			if frame.Line != 0 {
+				m.HasLineNumbers = true
+			}
+			f := &profile.Function{
+				Name:       frame.Func,
+				SystemName: frame.Func,
+				Filename:   frame.File,
+			}
+			if fp := functions[*f]; fp != nil {
+				f = fp
+			} else {
+				functions[*f] = f
+				f.ID = uint64(len(mt.prof.Function)) + 1
+				mt.prof.Function = append(mt.prof.Function, f)
+			}
+			l.Line[i] = profile.Line{
+				Function: f,
+				Line:     int64(frame.Line),
+			}
+		}
+
+		// Multiple frames for a single address mean inlining occurred.
+		// (The previous test was "len(stack) > 0", which is always true
+		// here — the empty case was skipped above — and wrongly marked
+		// every symbolized mapping as having inline frames.)
+		if len(stack) > 1 {
+			m.HasInlineFrames = true
+		}
+	}
+
+	return nil
+}
+
+// Demangle updates the function names in a profile with demangled C++
+// names, simplified according to demanglerMode. If force is set,
+// overwrite any names that appear already demangled.
+func Demangle(prof *profile.Profile, force bool, demanglerMode string) {
+ if force {
+ // Remove the current demangled names to force demangling
+ for _, f := range prof.Function {
+ if f.Name != "" && f.SystemName != "" {
+ f.Name = f.SystemName
+ }
+ }
+ }
+
+ options := demanglerModeToOptions(demanglerMode)
+ for _, fn := range prof.Function {
+ demangleSingleFunction(fn, options)
+ }
+}
+
+// demanglerModeToOptions maps a -symbolize demangle mode name to the
+// corresponding demangle options. It panics on an unknown mode; the option
+// parsing in Symbolize only passes recognized modes here.
+func demanglerModeToOptions(demanglerMode string) []demangle.Option {
+	switch demanglerMode {
+	case "":
+		// Demangled, simplified: no parameters, no templates, no return type.
+		return []demangle.Option{demangle.NoParams, demangle.NoEnclosingParams, demangle.NoTemplateParams}
+	case "templates":
+		// Demangled, simplified: no parameters, no return type.
+		return []demangle.Option{demangle.NoParams, demangle.NoEnclosingParams}
+	case "full":
+		return []demangle.Option{demangle.NoClones}
+	case "none":
+		// No demangling.
+		return []demangle.Option{}
+	default:
+		panic(fmt.Sprintf("unknown demanglerMode %s", demanglerMode))
+	}
+}
+
+// demangleSingleFunction sets fn.Name from fn.SystemName using the demangle
+// package, falling back to simplification heuristics when the name does not
+// demangle but already looks like demangled C++.
+func demangleSingleFunction(fn *profile.Function, options []demangle.Option) {
+ if fn.Name != "" && fn.SystemName != fn.Name {
+ return // Already demangled.
+ }
+ // Copy the options because they may be updated by the call.
+ o := make([]demangle.Option, len(options))
+ copy(o, options)
+ if demangled := demangle.Filter(fn.SystemName, o...); demangled != fn.SystemName {
+ fn.Name = demangled
+ return
+ }
+ // Could not demangle. Apply heuristics in case the name is
+ // already demangled.
+ name := fn.SystemName
+ if looksLikeDemangledCPlusPlus(name) {
+ // Strip parameter and template lists when the options request it.
+ for _, o := range options {
+ switch o {
+ case demangle.NoParams:
+ name = removeMatching(name, '(', ')')
+ case demangle.NoTemplateParams:
+ name = removeMatching(name, '<', '>')
+ }
+ }
+ }
+ fn.Name = name
+}
+
+// looksLikeDemangledCPlusPlus is a heuristic to decide if a name is the
+// result of demangling C++. If so, further heuristics may be applied to
+// simplify the name.
+func looksLikeDemangledCPlusPlus(demangled string) bool {
+	switch {
+	case strings.Contains(demangled, ".<"):
+		// Java names of the form "class.<init>".
+		return false
+	case strings.Contains(demangled, "])."):
+		// Go names of the form "foo.(*Bar[...]).Method".
+		return false
+	default:
+		return strings.Contains(demangled, "::") || strings.ContainsAny(demangled, "<>[]")
+	}
+}
+
+// removeMatching removes nested instances of start..end from name.
+// It scans left to right tracking bracket depth; when the depth returns to
+// zero, the whole balanced start..end span is cut out and scanning resumes
+// just before the cut. Unbalanced input is returned unchanged.
+func removeMatching(name string, start, end byte) string {
+ s := string(start) + string(end)
+ // nesting: current bracket depth; first: index of the outermost start
+ // byte of the span being tracked; current: scan position within name.
+ var nesting, first, current int
+ for index := strings.IndexAny(name[current:], s); index != -1; index = strings.IndexAny(name[current:], s) {
+ switch current += index; name[current] {
+ case start:
+ nesting++
+ if nesting == 1 {
+ first = current
+ }
+ case end:
+ nesting--
+ switch {
+ case nesting < 0:
+ return name // Mismatch, abort
+ case nesting == 0:
+ // Cut the completed span; resume scanning just before it.
+ name = name[:first] + name[current+1:]
+ current = first - 1
+ }
+ }
+ current++
+ }
+ return name
+}
+
+// newMapping creates a mappingTable for a profile, opening the object file
+// for every mapping that is referenced by a location, needs symbolization,
+// and can be found and verified locally. Mappings that cannot be opened
+// are reported to the UI and skipped rather than treated as fatal.
+func newMapping(prof *profile.Profile, obj plugin.ObjTool, ui plugin.UI, force bool) (*mappingTable, error) {
+ mt := &mappingTable{
+ prof: prof,
+ segments: make(map[*profile.Mapping]plugin.ObjFile),
+ }
+
+ // Identify used mappings
+ mappings := make(map[*profile.Mapping]bool)
+ for _, l := range prof.Location {
+ mappings[l.Mapping] = true
+ }
+
+ missingBinaries := false
+ for midx, m := range prof.Mapping {
+ if !mappings[m] {
+ continue
+ }
+
+ // Do not attempt to re-symbolize a mapping that has already been symbolized.
+ if !force && (m.HasFunctions || m.HasFilenames || m.HasLineNumbers) {
+ continue
+ }
+
+ if m.File == "" {
+ if midx == 0 {
+ ui.PrintErr("Main binary filename not available.")
+ continue
+ }
+ missingBinaries = true
+ continue
+ }
+
+ // Skip well-known system mappings
+ if m.Unsymbolizable() {
+ continue
+ }
+
+ // Skip mappings pointing to a source URL
+ if m.BuildID == "" {
+ if u, err := url.Parse(m.File); err == nil && u.IsAbs() && strings.Contains(strings.ToLower(u.Scheme), "http") {
+ continue
+ }
+ }
+
+ name := filepath.Base(m.File)
+ if m.BuildID != "" {
+ name += fmt.Sprintf(" (build ID %s)", m.BuildID)
+ }
+ f, err := obj.Open(m.File, m.Start, m.Limit, m.Offset, m.KernelRelocationSymbol)
+ if err != nil {
+ ui.PrintErr("Local symbolization failed for ", name, ": ", err)
+ missingBinaries = true
+ continue
+ }
+ // Reject binaries whose build ID does not match the profile's.
+ if fid := f.BuildID(); m.BuildID != "" && fid != "" && fid != m.BuildID {
+ ui.PrintErr("Local symbolization failed for ", name, ": build ID mismatch")
+ f.Close()
+ continue
+ }
+
+ mt.segments[m] = f
+ }
+ if missingBinaries {
+ ui.PrintErr("Some binary filenames not available. Symbolization may be incomplete.\n" +
+ "Try setting PPROF_BINARY_PATH to the search path for local binaries.")
+ }
+ return mt, nil
+}
+
+// mappingTable contains the mechanisms for symbolization of a
+// profile: the profile itself and an open object file per mapping.
+type mappingTable struct {
+ prof *profile.Profile
+ segments map[*profile.Mapping]plugin.ObjFile // Open object files, keyed by mapping.
+}
+
+// close releases any external processes being used for the mapping by
+// closing every object file in the table.
+func (mt *mappingTable) close() {
+	for _, f := range mt.segments {
+		f.Close()
+	}
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/symbolz/symbolz.go b/src/cmd/vendor/github.com/google/pprof/internal/symbolz/symbolz.go
new file mode 100644
index 0000000..7be3048
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/symbolz/symbolz.go
@@ -0,0 +1,200 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package symbolz symbolizes a profile using the output from the symbolz
+// service.
+package symbolz
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "net/url"
+ "path"
+ "regexp"
+ "strconv"
+ "strings"
+
+ "github.com/google/pprof/internal/plugin"
+ "github.com/google/pprof/profile"
+)
+
+var (
+	// symbolzRE matches one line of symbolz output: a hex address
+	// followed by whitespace and the symbol name.
+	symbolzRE = regexp.MustCompile(`(0x[[:xdigit:]]+)\s+(.*)`)
+)
+
+// Symbolize symbolizes profile p by parsing data returned by a symbolz
+// handler. syms receives the symbolz query (hex addresses separated by '+')
+// and returns the symbolz output in a string. If force is false, it will only
+// symbolize locations from mappings not already marked as HasFunctions. Never
+// attempts symbolization of addresses from unsymbolizable system
+// mappings as those may look negative - e.g. "[vsyscall]".
+func Symbolize(p *profile.Profile, force bool, sources plugin.MappingSources, syms func(string, string) ([]byte, error), ui plugin.UI) error {
+	for _, m := range p.Mapping {
+		if !force && m.HasFunctions {
+			// Only check for HasFunctions as symbolz only populates function names.
+			continue
+		}
+		// Skip well-known system mappings.
+		if m.Unsymbolizable() {
+			continue
+		}
+		// Candidate sources are looked up by file name and, when
+		// available, also by build ID.
+		mappingSources := sources[m.File]
+		if m.BuildID != "" {
+			mappingSources = append(mappingSources, sources[m.BuildID]...)
+		}
+		// Use the first source that maps to a symbolz endpoint.
+		for _, source := range mappingSources {
+			if symz := symbolz(source.Source); symz != "" {
+				// The offset compensates for address normalization applied
+				// when profiles were merged (source start vs mapping start).
+				if err := symbolizeMapping(symz, int64(source.Start)-int64(m.Start), syms, m, p); err != nil {
+					return err
+				}
+				m.HasFunctions = true
+				break
+			}
+		}
+	}
+
+	return nil
+}
+
+// hasGperftoolsSuffix checks whether path ends with one of the suffixes listed in
+// pprof_remote_servers.html from the gperftools distribution.
+func hasGperftoolsSuffix(path string) bool {
+	suffixes := []string{
+		"/pprof/heap",
+		"/pprof/growth",
+		"/pprof/profile",
+		"/pprof/pmuprofile",
+		"/pprof/contention",
+	}
+	for _, s := range suffixes {
+		if strings.HasSuffix(path, s) {
+			return true
+		}
+	}
+	return false
+}
+
+// symbolz returns the corresponding symbolz source for a profile URL.
+// It returns "" when source is not an absolute URL with a host.
+func symbolz(source string) string {
+	if url, err := url.Parse(source); err == nil && url.Host != "" {
+		// All paths in the net/http/pprof Go package contain /debug/pprof/
+		if strings.Contains(url.Path, "/debug/pprof/") || hasGperftoolsSuffix(url.Path) {
+			// Replace the final path element with "symbol", a sibling of
+			// the profile endpoint.
+			url.Path = path.Clean(url.Path + "/../symbol")
+		} else {
+			url.Path = "/symbolz"
+		}
+		// Drop any query parameters carried over from the profile URL.
+		url.RawQuery = ""
+		return url.String()
+	}
+
+	return ""
+}
+
+// symbolizeMapping symbolizes locations belonging to a Mapping by querying
+// a symbolz handler. An offset is applied to all addresses to take care of
+// normalization occurred for merged Mappings.
+func symbolizeMapping(source string, offset int64, syms func(string, string) ([]byte, error), m *profile.Mapping, p *profile.Profile) error {
+	// Construct query of addresses to symbolize.
+	var a []string
+	for _, l := range p.Location {
+		// Only query addresses in this mapping that have no line info yet.
+		if l.Mapping == m && l.Address != 0 && len(l.Line) == 0 {
+			// Compensate for normalization.
+			addr, overflow := adjust(l.Address, offset)
+			if overflow {
+				return fmt.Errorf("cannot adjust address %d by %d, it would overflow (mapping %v)", l.Address, offset, l.Mapping)
+			}
+			a = append(a, fmt.Sprintf("%#x", addr))
+		}
+	}
+
+	if len(a) == 0 {
+		// No addresses to symbolize.
+		return nil
+	}
+
+	// lines maps profile addresses to symbolized lines; functions
+	// deduplicates Function objects by name.
+	lines := make(map[uint64]profile.Line)
+	functions := make(map[string]*profile.Function)
+
+	b, err := syms(source, strings.Join(a, "+"))
+	if err != nil {
+		return err
+	}
+
+	// Parse the symbolz response, one "0xADDR name" pair per line.
+	// NOTE(review): a final line without a trailing '\n' is returned
+	// together with io.EOF and dropped here — presumably symbolz output
+	// is always newline-terminated; confirm.
+	buf := bytes.NewBuffer(b)
+	for {
+		l, err := buf.ReadString('\n')
+
+		if err != nil {
+			if err == io.EOF {
+				break
+			}
+			return err
+		}
+
+		if symbol := symbolzRE.FindStringSubmatch(l); len(symbol) == 3 {
+			origAddr, err := strconv.ParseUint(symbol[1], 0, 64)
+			if err != nil {
+				return fmt.Errorf("unexpected parse failure %s: %v", symbol[1], err)
+			}
+			// Reapply offset expected by the profile.
+			addr, overflow := adjust(origAddr, -offset)
+			if overflow {
+				return fmt.Errorf("cannot adjust symbolz address %d by %d, it would overflow", origAddr, -offset)
+			}
+
+			name := symbol[2]
+			fn := functions[name]
+			if fn == nil {
+				// First time this name is seen: create the Function and
+				// register it in the profile.
+				fn = &profile.Function{
+					ID:         uint64(len(p.Function) + 1),
+					Name:       name,
+					SystemName: name,
+				}
+				functions[name] = fn
+				p.Function = append(p.Function, fn)
+			}
+
+			lines[addr] = profile.Line{Function: fn}
+		}
+	}
+
+	// Attach the symbolized lines to the matching locations.
+	for _, l := range p.Location {
+		if l.Mapping != m {
+			continue
+		}
+		if line, ok := lines[l.Address]; ok {
+			l.Line = []profile.Line{line}
+		}
+	}
+
+	return nil
+}
+
+// adjust shifts the specified address by the signed offset. It returns the
+// adjusted address. It signals that the address cannot be adjusted without an
+// overflow by returning true in the second return value.
+func adjust(addr uint64, offset int64) (uint64, bool) {
+	adj := uint64(int64(addr) + offset)
+	if offset < 0 {
+		// A negative offset must decrease the address; otherwise the
+		// unsigned arithmetic wrapped around.
+		if adj >= addr {
+			return 0, true
+		}
+	} else {
+		// A non-negative offset must not decrease the address.
+		if adj < addr {
+			return 0, true
+		}
+	}
+	return adj, false
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/transport/transport.go b/src/cmd/vendor/github.com/google/pprof/internal/transport/transport.go
new file mode 100644
index 0000000..6c3bd0d
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/transport/transport.go
@@ -0,0 +1,131 @@
+// Copyright 2018 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package transport provides a mechanism to send requests with https cert,
+// key, and CA.
+package transport
+
+import (
+ "crypto/tls"
+ "crypto/x509"
+ "fmt"
+ "net/http"
+ "os"
+ "sync"
+
+ "github.com/google/pprof/internal/plugin"
+)
+
+// transport is an http.RoundTripper that sends requests using TLS
+// client configuration (certificate, key, CA) taken from command-line
+// flags. The flag values are read lazily on first use.
+type transport struct {
+	cert       *string           // -tls_cert flag value (nil if flags not registered)
+	key        *string           // -tls_key flag value
+	ca         *string           // -tls_ca flag value
+	caCertPool *x509.CertPool    // populated from -tls_ca by initialize
+	certs      []tls.Certificate // populated from -tls_cert/-tls_key by initialize
+	initOnce   sync.Once         // guards one-time initialize
+	initErr    error             // error recorded by initialize
+}
+
+const extraUsage = ` -tls_cert TLS client certificate file for fetching profile and symbols
+ -tls_key TLS private key file for fetching profile and symbols
+ -tls_ca TLS CA certs file for fetching profile and symbols`
+
+// New returns a round tripper for making requests with the
+// specified cert, key, and ca. The flags tls_cert, tls_key, and tls_ca are
+// added to the flagset to allow a user to specify the cert, key, and ca. If
+// the flagset is nil, no flags will be added, and users will not be able to
+// use these flags.
+func New(flagset plugin.FlagSet) http.RoundTripper {
+	if flagset == nil {
+		// No flags available: the zero transport uses default TLS settings.
+		return &transport{}
+	}
+	flagset.AddExtraUsage(extraUsage)
+	return &transport{
+		cert: flagset.String("tls_cert", "", "TLS client certificate file for fetching profile and symbols"),
+		key:  flagset.String("tls_key", "", "TLS private key file for fetching profile and symbols"),
+		ca:   flagset.String("tls_ca", "", "TLS CA certs file for fetching profile and symbols"),
+	}
+}
+
+// initialize uses the cert, key, and ca to initialize the certs
+// to use these when making requests.
+func (tr *transport) initialize() error {
+	// Dereference the flag pointers; nil pointers (flags never
+	// registered) behave like empty values.
+	var cert, key, ca string
+	if tr.cert != nil {
+		cert = *tr.cert
+	}
+	if tr.key != nil {
+		key = *tr.key
+	}
+	if tr.ca != nil {
+		ca = *tr.ca
+	}
+
+	// Cert and key must be given together.
+	if cert != "" && key != "" {
+		tlsCert, err := tls.LoadX509KeyPair(cert, key)
+		if err != nil {
+			return fmt.Errorf("could not load certificate/key pair specified by -tls_cert and -tls_key: %v", err)
+		}
+		tr.certs = []tls.Certificate{tlsCert}
+	} else if cert == "" && key != "" {
+		return fmt.Errorf("-tls_key is specified, so -tls_cert must also be specified")
+	} else if cert != "" && key == "" {
+		return fmt.Errorf("-tls_cert is specified, so -tls_key must also be specified")
+	}
+
+	// An optional CA bundle overrides the system roots for server
+	// certificate verification.
+	if ca != "" {
+		caCertPool := x509.NewCertPool()
+		caCert, err := os.ReadFile(ca)
+		if err != nil {
+			return fmt.Errorf("could not load CA specified by -tls_ca: %v", err)
+		}
+		caCertPool.AppendCertsFromPEM(caCert)
+		tr.caCertPool = caCertPool
+	}
+
+	return nil
+}
+
+// RoundTrip executes a single HTTP transaction, returning
+// a Response for the provided Request.
+func (tr *transport) RoundTrip(req *http.Request) (*http.Response, error) {
+	// Load TLS material from the flags exactly once; remember any error.
+	tr.initOnce.Do(func() {
+		tr.initErr = tr.initialize()
+	})
+	if tr.initErr != nil {
+		return nil, tr.initErr
+	}
+
+	tlsConfig := &tls.Config{
+		RootCAs:      tr.caCertPool,
+		Certificates: tr.certs,
+	}
+
+	// The pseudo-scheme "https+insecure" means HTTPS without server
+	// certificate verification.
+	if req.URL.Scheme == "https+insecure" {
+		// Make shallow copy of request, and req.URL, so the request's URL can be
+		// modified.
+		r := *req
+		*r.URL = *req.URL
+		req = &r
+		tlsConfig.InsecureSkipVerify = true
+		req.URL.Scheme = "https"
+	}
+
+	transport := http.Transport{
+		Proxy:           http.ProxyFromEnvironment,
+		TLSClientConfig: tlsConfig,
+	}
+
+	return transport.RoundTrip(req)
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/profile/encode.go b/src/cmd/vendor/github.com/google/pprof/profile/encode.go
new file mode 100644
index 0000000..182c926
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/profile/encode.go
@@ -0,0 +1,588 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package profile
+
+import (
+ "errors"
+ "sort"
+ "strings"
+)
+
+// decoder returns the field-decoder table used to parse an encoded
+// Profile protobuf message.
+func (p *Profile) decoder() []decoder {
+	return profileDecoder
+}
+
+// preEncode populates the unexported fields to be used by encode
+// (with suffix X) from the corresponding exported fields. The
+// exported fields are cleared up to facilitate testing.
+func (p *Profile) preEncode() {
+	// Note: the local variable "strings" shadows the strings package for
+	// the rest of this function. It interns every string used by the
+	// profile into the string table; index 0 is always "".
+	strings := make(map[string]int)
+	addString(strings, "")
+
+	for _, st := range p.SampleType {
+		st.typeX = addString(strings, st.Type)
+		st.unitX = addString(strings, st.Unit)
+	}
+
+	for _, s := range p.Sample {
+		s.labelX = nil
+		// Sort label keys so the encoded output is deterministic.
+		var keys []string
+		for k := range s.Label {
+			keys = append(keys, k)
+		}
+		sort.Strings(keys)
+		for _, k := range keys {
+			vs := s.Label[k]
+			for _, v := range vs {
+				s.labelX = append(s.labelX,
+					label{
+						keyX: addString(strings, k),
+						strX: addString(strings, v),
+					},
+				)
+			}
+		}
+		// Numeric labels (with optional units) are flattened into the
+		// same labelX list, also in sorted key order.
+		var numKeys []string
+		for k := range s.NumLabel {
+			numKeys = append(numKeys, k)
+		}
+		sort.Strings(numKeys)
+		for _, k := range numKeys {
+			keyX := addString(strings, k)
+			vs := s.NumLabel[k]
+			units := s.NumUnit[k]
+			for i, v := range vs {
+				var unitX int64
+				if len(units) != 0 {
+					unitX = addString(strings, units[i])
+				}
+				s.labelX = append(s.labelX,
+					label{
+						keyX:  keyX,
+						numX:  v,
+						unitX: unitX,
+					},
+				)
+			}
+		}
+		// Record location references by ID.
+		s.locationIDX = make([]uint64, len(s.Location))
+		for i, loc := range s.Location {
+			s.locationIDX[i] = loc.ID
+		}
+	}
+
+	for _, m := range p.Mapping {
+		m.fileX = addString(strings, m.File)
+		m.buildIDX = addString(strings, m.BuildID)
+	}
+
+	// Record mapping and function references by ID (0 when absent).
+	for _, l := range p.Location {
+		for i, ln := range l.Line {
+			if ln.Function != nil {
+				l.Line[i].functionIDX = ln.Function.ID
+			} else {
+				l.Line[i].functionIDX = 0
+			}
+		}
+		if l.Mapping != nil {
+			l.mappingIDX = l.Mapping.ID
+		} else {
+			l.mappingIDX = 0
+		}
+	}
+	for _, f := range p.Function {
+		f.nameX = addString(strings, f.Name)
+		f.systemNameX = addString(strings, f.SystemName)
+		f.filenameX = addString(strings, f.Filename)
+	}
+
+	p.dropFramesX = addString(strings, p.DropFrames)
+	p.keepFramesX = addString(strings, p.KeepFrames)
+
+	if pt := p.PeriodType; pt != nil {
+		pt.typeX = addString(strings, pt.Type)
+		pt.unitX = addString(strings, pt.Unit)
+	}
+
+	p.commentX = nil
+	for _, c := range p.Comments {
+		p.commentX = append(p.commentX, addString(strings, c))
+	}
+
+	p.defaultSampleTypeX = addString(strings, p.DefaultSampleType)
+
+	// Materialize the interning map as the positional string table.
+	p.stringTable = make([]string, len(strings))
+	for s, i := range strings {
+		p.stringTable[i] = s
+	}
+}
+
+// encode writes the profile to b as a protobuf message. Field numbers
+// follow the pprof profile.proto definition; preEncode must have run
+// first to populate the X-suffixed fields.
+func (p *Profile) encode(b *buffer) {
+	for _, x := range p.SampleType {
+		encodeMessage(b, 1, x)
+	}
+	for _, x := range p.Sample {
+		encodeMessage(b, 2, x)
+	}
+	for _, x := range p.Mapping {
+		encodeMessage(b, 3, x)
+	}
+	for _, x := range p.Location {
+		encodeMessage(b, 4, x)
+	}
+	for _, x := range p.Function {
+		encodeMessage(b, 5, x)
+	}
+	encodeStrings(b, 6, p.stringTable)
+	encodeInt64Opt(b, 7, p.dropFramesX)
+	encodeInt64Opt(b, 8, p.keepFramesX)
+	encodeInt64Opt(b, 9, p.TimeNanos)
+	encodeInt64Opt(b, 10, p.DurationNanos)
+	// Only emit the period type if it carries non-default content.
+	if pt := p.PeriodType; pt != nil && (pt.typeX != 0 || pt.unitX != 0) {
+		encodeMessage(b, 11, p.PeriodType)
+	}
+	encodeInt64Opt(b, 12, p.Period)
+	encodeInt64s(b, 13, p.commentX)
+	encodeInt64(b, 14, p.defaultSampleTypeX)
+}
+
+// profileDecoder decodes top-level Profile fields; the slice is indexed
+// by protobuf field number (entry 0 is unused).
+var profileDecoder = []decoder{
+	nil, // 0
+	// repeated ValueType sample_type = 1
+	func(b *buffer, m message) error {
+		x := new(ValueType)
+		pp := m.(*Profile)
+		pp.SampleType = append(pp.SampleType, x)
+		return decodeMessage(b, x)
+	},
+	// repeated Sample sample = 2
+	func(b *buffer, m message) error {
+		x := new(Sample)
+		pp := m.(*Profile)
+		pp.Sample = append(pp.Sample, x)
+		return decodeMessage(b, x)
+	},
+	// repeated Mapping mapping = 3
+	func(b *buffer, m message) error {
+		x := new(Mapping)
+		pp := m.(*Profile)
+		pp.Mapping = append(pp.Mapping, x)
+		return decodeMessage(b, x)
+	},
+	// repeated Location location = 4
+	func(b *buffer, m message) error {
+		x := new(Location)
+		x.Line = b.tmpLines[:0] // Use shared space temporarily
+		pp := m.(*Profile)
+		pp.Location = append(pp.Location, x)
+		err := decodeMessage(b, x)
+		// Return the (possibly grown) scratch space for the next location.
+		b.tmpLines = x.Line[:0]
+		// Copy to shrink size and detach from shared space.
+		x.Line = append([]Line(nil), x.Line...)
+		return err
+	},
+	// repeated Function function = 5
+	func(b *buffer, m message) error {
+		x := new(Function)
+		pp := m.(*Profile)
+		pp.Function = append(pp.Function, x)
+		return decodeMessage(b, x)
+	},
+	// repeated string string_table = 6
+	func(b *buffer, m message) error {
+		err := decodeStrings(b, &m.(*Profile).stringTable)
+		if err != nil {
+			return err
+		}
+		// The profile.proto spec requires the first table entry to be "".
+		if m.(*Profile).stringTable[0] != "" {
+			return errors.New("string_table[0] must be ''")
+		}
+		return nil
+	},
+	// int64 drop_frames = 7
+	func(b *buffer, m message) error { return decodeInt64(b, &m.(*Profile).dropFramesX) },
+	// int64 keep_frames = 8
+	func(b *buffer, m message) error { return decodeInt64(b, &m.(*Profile).keepFramesX) },
+	// int64 time_nanos = 9
+	func(b *buffer, m message) error {
+		// A second time_nanos indicates naively concatenated profiles,
+		// which must be merged, not parsed as one.
+		if m.(*Profile).TimeNanos != 0 {
+			return errConcatProfile
+		}
+		return decodeInt64(b, &m.(*Profile).TimeNanos)
+	},
+	// int64 duration_nanos = 10
+	func(b *buffer, m message) error { return decodeInt64(b, &m.(*Profile).DurationNanos) },
+	// ValueType period_type = 11
+	func(b *buffer, m message) error {
+		x := new(ValueType)
+		pp := m.(*Profile)
+		pp.PeriodType = x
+		return decodeMessage(b, x)
+	},
+	// int64 period = 12
+	func(b *buffer, m message) error { return decodeInt64(b, &m.(*Profile).Period) },
+	// repeated int64 comment = 13
+	func(b *buffer, m message) error { return decodeInt64s(b, &m.(*Profile).commentX) },
+	// int64 defaultSampleType = 14
+	func(b *buffer, m message) error { return decodeInt64(b, &m.(*Profile).defaultSampleTypeX) },
+}
+
+// postDecode takes the unexported fields populated by decode (with
+// suffix X) and populates the corresponding exported fields.
+// The unexported fields are cleared up to facilitate testing.
+func (p *Profile) postDecode() error {
+	var err error
+	// Index mappings by ID: dense IDs (< len+1) use a slice fast path,
+	// sparse IDs fall back to a map. The same scheme is used for
+	// functions and locations below.
+	mappings := make(map[uint64]*Mapping, len(p.Mapping))
+	mappingIds := make([]*Mapping, len(p.Mapping)+1)
+	for _, m := range p.Mapping {
+		m.File, err = getString(p.stringTable, &m.fileX, err)
+		m.BuildID, err = getString(p.stringTable, &m.buildIDX, err)
+		if m.ID < uint64(len(mappingIds)) {
+			mappingIds[m.ID] = m
+		} else {
+			mappings[m.ID] = m
+		}
+
+		// If this a main linux kernel mapping with a relocation symbol suffix
+		// ("[kernel.kallsyms]_text"), extract said suffix.
+		// It is fairly hacky to handle at this level, but the alternatives appear even worse.
+		const prefix = "[kernel.kallsyms]"
+		if strings.HasPrefix(m.File, prefix) {
+			m.KernelRelocationSymbol = m.File[len(prefix):]
+		}
+	}
+
+	functions := make(map[uint64]*Function, len(p.Function))
+	functionIds := make([]*Function, len(p.Function)+1)
+	for _, f := range p.Function {
+		f.Name, err = getString(p.stringTable, &f.nameX, err)
+		f.SystemName, err = getString(p.stringTable, &f.systemNameX, err)
+		f.Filename, err = getString(p.stringTable, &f.filenameX, err)
+		if f.ID < uint64(len(functionIds)) {
+			functionIds[f.ID] = f
+		} else {
+			functions[f.ID] = f
+		}
+	}
+
+	// Resolve each location's mapping and line-function references.
+	locations := make(map[uint64]*Location, len(p.Location))
+	locationIds := make([]*Location, len(p.Location)+1)
+	for _, l := range p.Location {
+		if id := l.mappingIDX; id < uint64(len(mappingIds)) {
+			l.Mapping = mappingIds[id]
+		} else {
+			l.Mapping = mappings[id]
+		}
+		l.mappingIDX = 0
+		for i, ln := range l.Line {
+			if id := ln.functionIDX; id != 0 {
+				l.Line[i].functionIDX = 0
+				if id < uint64(len(functionIds)) {
+					l.Line[i].Function = functionIds[id]
+				} else {
+					l.Line[i].Function = functions[id]
+				}
+			}
+		}
+		if l.ID < uint64(len(locationIds)) {
+			locationIds[l.ID] = l
+		} else {
+			locations[l.ID] = l
+		}
+	}
+
+	for _, st := range p.SampleType {
+		st.Type, err = getString(p.stringTable, &st.typeX, err)
+		st.Unit, err = getString(p.stringTable, &st.unitX, err)
+	}
+
+	// Pre-allocate space for all locations.
+	numLocations := 0
+	for _, s := range p.Sample {
+		numLocations += len(s.locationIDX)
+	}
+	locBuffer := make([]*Location, numLocations)
+
+	for _, s := range p.Sample {
+		if len(s.labelX) > 0 {
+			// Rebuild the string labels (Label) and the numeric labels
+			// (NumLabel/NumUnit) from the flat decoded label list.
+			labels := make(map[string][]string, len(s.labelX))
+			numLabels := make(map[string][]int64, len(s.labelX))
+			numUnits := make(map[string][]string, len(s.labelX))
+			for _, l := range s.labelX {
+				var key, value string
+				key, err = getString(p.stringTable, &l.keyX, err)
+				if l.strX != 0 {
+					value, err = getString(p.stringTable, &l.strX, err)
+					labels[key] = append(labels[key], value)
+				} else if l.numX != 0 || l.unitX != 0 {
+					numValues := numLabels[key]
+					units := numUnits[key]
+					if l.unitX != 0 {
+						var unit string
+						unit, err = getString(p.stringTable, &l.unitX, err)
+						// Pad so the unit aligns with its value's index.
+						units = padStringArray(units, len(numValues))
+						numUnits[key] = append(units, unit)
+					}
+					numLabels[key] = append(numLabels[key], l.numX)
+				}
+			}
+			if len(labels) > 0 {
+				s.Label = labels
+			}
+			if len(numLabels) > 0 {
+				s.NumLabel = numLabels
+				// Pad unit lists so each key's units match its value count.
+				for key, units := range numUnits {
+					if len(units) > 0 {
+						numUnits[key] = padStringArray(units, len(numLabels[key]))
+					}
+				}
+				s.NumUnit = numUnits
+			}
+		}
+
+		// Carve this sample's location slice out of the shared buffer.
+		s.Location = locBuffer[:len(s.locationIDX)]
+		locBuffer = locBuffer[len(s.locationIDX):]
+		for i, lid := range s.locationIDX {
+			if lid < uint64(len(locationIds)) {
+				s.Location[i] = locationIds[lid]
+			} else {
+				s.Location[i] = locations[lid]
+			}
+		}
+		s.locationIDX = nil
+	}
+
+	p.DropFrames, err = getString(p.stringTable, &p.dropFramesX, err)
+	p.KeepFrames, err = getString(p.stringTable, &p.keepFramesX, err)
+
+	// Guarantee a non-nil PeriodType after decoding.
+	if pt := p.PeriodType; pt == nil {
+		p.PeriodType = &ValueType{}
+	}
+
+	if pt := p.PeriodType; pt != nil {
+		pt.Type, err = getString(p.stringTable, &pt.typeX, err)
+		pt.Unit, err = getString(p.stringTable, &pt.unitX, err)
+	}
+
+	for _, i := range p.commentX {
+		var c string
+		c, err = getString(p.stringTable, &i, err)
+		p.Comments = append(p.Comments, c)
+	}
+
+	p.commentX = nil
+	p.DefaultSampleType, err = getString(p.stringTable, &p.defaultSampleTypeX, err)
+	// The string table is no longer needed once all references resolve.
+	p.stringTable = nil
+	return err
+}
+
+// padStringArray pads arr with enough empty strings to make arr
+// length l when arr's length is less than l.
+func padStringArray(arr []string, l int) []string {
+	if l <= len(arr) {
+		return arr
+	}
+	return append(arr, make([]string, l-len(arr))...)
+}
+
+// decoder returns the field-decoder table for ValueType messages.
+func (p *ValueType) decoder() []decoder {
+	return valueTypeDecoder
+}
+
+// encode writes the ValueType's interned string indices to b.
+func (p *ValueType) encode(b *buffer) {
+	encodeInt64Opt(b, 1, p.typeX)
+	encodeInt64Opt(b, 2, p.unitX)
+}
+
+// valueTypeDecoder is indexed by protobuf field number.
+var valueTypeDecoder = []decoder{
+	nil, // 0
+	// optional int64 type = 1
+	func(b *buffer, m message) error { return decodeInt64(b, &m.(*ValueType).typeX) },
+	// optional int64 unit = 2
+	func(b *buffer, m message) error { return decodeInt64(b, &m.(*ValueType).unitX) },
+}
+
+// decoder returns the field-decoder table for Sample messages.
+func (p *Sample) decoder() []decoder {
+	return sampleDecoder
+}
+
+// encode writes the sample's location IDs, values, and labels to b.
+func (p *Sample) encode(b *buffer) {
+	encodeUint64s(b, 1, p.locationIDX)
+	encodeInt64s(b, 2, p.Value)
+	for _, x := range p.labelX {
+		encodeMessage(b, 3, x)
+	}
+}
+
+// sampleDecoder is indexed by protobuf field number.
+var sampleDecoder = []decoder{
+	nil, // 0
+	// repeated uint64 location = 1
+	func(b *buffer, m message) error { return decodeUint64s(b, &m.(*Sample).locationIDX) },
+	// repeated int64 value = 2
+	func(b *buffer, m message) error { return decodeInt64s(b, &m.(*Sample).Value) },
+	// repeated Label label = 3
+	func(b *buffer, m message) error {
+		s := m.(*Sample)
+		n := len(s.labelX)
+		// Append first, then decode into the slice element, avoiding a
+		// separate allocation per label.
+		s.labelX = append(s.labelX, label{})
+		return decodeMessage(b, &s.labelX[n])
+	},
+}
+
+// decoder returns the field-decoder table for Label messages.
+func (p label) decoder() []decoder {
+	return labelDecoder
+}
+
+// encode writes the label's key and either its string value (str) or
+// its numeric value/unit pair to b.
+func (p label) encode(b *buffer) {
+	encodeInt64Opt(b, 1, p.keyX)
+	encodeInt64Opt(b, 2, p.strX)
+	encodeInt64Opt(b, 3, p.numX)
+	encodeInt64Opt(b, 4, p.unitX)
+}
+
+// labelDecoder is indexed by protobuf field number.
+var labelDecoder = []decoder{
+	nil, // 0
+	// optional int64 key = 1
+	func(b *buffer, m message) error { return decodeInt64(b, &m.(*label).keyX) },
+	// optional int64 str = 2
+	func(b *buffer, m message) error { return decodeInt64(b, &m.(*label).strX) },
+	// optional int64 num = 3
+	func(b *buffer, m message) error { return decodeInt64(b, &m.(*label).numX) },
+	// optional int64 unit = 4
+	func(b *buffer, m message) error { return decodeInt64(b, &m.(*label).unitX) },
+}
+
+// decoder returns the field-decoder table for Mapping messages.
+func (p *Mapping) decoder() []decoder {
+	return mappingDecoder
+}
+
+// encode writes the mapping's address range, file/build-ID string
+// indices, and symbolization flags to b.
+func (p *Mapping) encode(b *buffer) {
+	encodeUint64Opt(b, 1, p.ID)
+	encodeUint64Opt(b, 2, p.Start)
+	encodeUint64Opt(b, 3, p.Limit)
+	encodeUint64Opt(b, 4, p.Offset)
+	encodeInt64Opt(b, 5, p.fileX)
+	encodeInt64Opt(b, 6, p.buildIDX)
+	encodeBoolOpt(b, 7, p.HasFunctions)
+	encodeBoolOpt(b, 8, p.HasFilenames)
+	encodeBoolOpt(b, 9, p.HasLineNumbers)
+	encodeBoolOpt(b, 10, p.HasInlineFrames)
+}
+
+// mappingDecoder is indexed by protobuf field number.
+var mappingDecoder = []decoder{
+	nil, // 0
+	func(b *buffer, m message) error { return decodeUint64(b, &m.(*Mapping).ID) },            // optional uint64 id = 1
+	func(b *buffer, m message) error { return decodeUint64(b, &m.(*Mapping).Start) },         // optional uint64 memory_start = 2
+	func(b *buffer, m message) error { return decodeUint64(b, &m.(*Mapping).Limit) },         // optional uint64 memory_limit = 3
+	func(b *buffer, m message) error { return decodeUint64(b, &m.(*Mapping).Offset) },        // optional uint64 file_offset = 4
+	func(b *buffer, m message) error { return decodeInt64(b, &m.(*Mapping).fileX) },          // optional int64 filename = 5
+	func(b *buffer, m message) error { return decodeInt64(b, &m.(*Mapping).buildIDX) },       // optional int64 build_id = 6
+	func(b *buffer, m message) error { return decodeBool(b, &m.(*Mapping).HasFunctions) },    // optional bool has_functions = 7
+	func(b *buffer, m message) error { return decodeBool(b, &m.(*Mapping).HasFilenames) },    // optional bool has_filenames = 8
+	func(b *buffer, m message) error { return decodeBool(b, &m.(*Mapping).HasLineNumbers) },  // optional bool has_line_numbers = 9
+	func(b *buffer, m message) error { return decodeBool(b, &m.(*Mapping).HasInlineFrames) }, // optional bool has_inline_frames = 10
+}
+
+// decoder returns the field-decoder table for Location messages.
+func (p *Location) decoder() []decoder {
+	return locationDecoder
+}
+
+// encode writes the location's mapping reference, address, and line
+// information to b.
+func (p *Location) encode(b *buffer) {
+	encodeUint64Opt(b, 1, p.ID)
+	encodeUint64Opt(b, 2, p.mappingIDX)
+	encodeUint64Opt(b, 3, p.Address)
+	for i := range p.Line {
+		encodeMessage(b, 4, &p.Line[i])
+	}
+	encodeBoolOpt(b, 5, p.IsFolded)
+}
+
+// locationDecoder is indexed by protobuf field number.
+var locationDecoder = []decoder{
+	nil, // 0
+	func(b *buffer, m message) error { return decodeUint64(b, &m.(*Location).ID) },         // optional uint64 id = 1;
+	func(b *buffer, m message) error { return decodeUint64(b, &m.(*Location).mappingIDX) }, // optional uint64 mapping_id = 2;
+	func(b *buffer, m message) error { return decodeUint64(b, &m.(*Location).Address) },    // optional uint64 address = 3;
+	func(b *buffer, m message) error { // repeated Line line = 4
+		pp := m.(*Location)
+		n := len(pp.Line)
+		// Append first, then decode into the slice element in place.
+		pp.Line = append(pp.Line, Line{})
+		return decodeMessage(b, &pp.Line[n])
+	},
+	func(b *buffer, m message) error { return decodeBool(b, &m.(*Location).IsFolded) }, // optional bool is_folded = 5;
+}
+
+// decoder returns the field-decoder table for Line messages.
+func (p *Line) decoder() []decoder {
+	return lineDecoder
+}
+
+// encode writes the line's function reference and line number to b.
+func (p *Line) encode(b *buffer) {
+	encodeUint64Opt(b, 1, p.functionIDX)
+	encodeInt64Opt(b, 2, p.Line)
+}
+
+// lineDecoder is indexed by protobuf field number.
+var lineDecoder = []decoder{
+	nil, // 0
+	// optional uint64 function_id = 1
+	func(b *buffer, m message) error { return decodeUint64(b, &m.(*Line).functionIDX) },
+	// optional int64 line = 2
+	func(b *buffer, m message) error { return decodeInt64(b, &m.(*Line).Line) },
+}
+
+// decoder returns the field-decoder table for Function messages.
+func (p *Function) decoder() []decoder {
+	return functionDecoder
+}
+
+// encode writes the function's name/filename string indices and start
+// line to b.
+func (p *Function) encode(b *buffer) {
+	encodeUint64Opt(b, 1, p.ID)
+	encodeInt64Opt(b, 2, p.nameX)
+	encodeInt64Opt(b, 3, p.systemNameX)
+	encodeInt64Opt(b, 4, p.filenameX)
+	encodeInt64Opt(b, 5, p.StartLine)
+}
+
+// functionDecoder is indexed by protobuf field number.
+var functionDecoder = []decoder{
+	nil, // 0
+	// optional uint64 id = 1
+	func(b *buffer, m message) error { return decodeUint64(b, &m.(*Function).ID) },
+	// optional int64 function_name = 2
+	func(b *buffer, m message) error { return decodeInt64(b, &m.(*Function).nameX) },
+	// optional int64 function_system_name = 3
+	func(b *buffer, m message) error { return decodeInt64(b, &m.(*Function).systemNameX) },
+	// optional int64 filename = 4
+	func(b *buffer, m message) error { return decodeInt64(b, &m.(*Function).filenameX) },
+	// optional int64 start_line = 5
+	func(b *buffer, m message) error { return decodeInt64(b, &m.(*Function).StartLine) },
+}
+
+// addString interns s in the strings map, assigning it the next
+// sequential index on first use, and returns s's string-table index.
+func addString(strings map[string]int, s string) int64 {
+	i, ok := strings[s]
+	if !ok {
+		i = len(strings)
+		strings[s] = i
+	}
+	return int64(i)
+}
+
+// getString resolves the string-table index *strng to its string and
+// resets the index to 0. A previous error is propagated unchanged; an
+// out-of-range index yields errMalformed.
+func getString(strings []string, strng *int64, err error) (string, error) {
+	if err != nil {
+		return "", err
+	}
+	s := int(*strng)
+	if s < 0 || s >= len(strings) {
+		return "", errMalformed
+	}
+	*strng = 0
+	return strings[s], nil
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/profile/filter.go b/src/cmd/vendor/github.com/google/pprof/profile/filter.go
new file mode 100644
index 0000000..c794b93
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/profile/filter.go
@@ -0,0 +1,274 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package profile
+
+// Implements methods to filter samples from profiles.
+
+import "regexp"
+
+// FilterSamplesByName filters the samples in a profile and only keeps
+// samples where at least one frame matches focus but none match ignore.
+// Returns true if the corresponding regexp matched at least one sample.
+func (p *Profile) FilterSamplesByName(focus, ignore, hide, show *regexp.Regexp) (fm, im, hm, hnm bool) {
+ if focus == nil && ignore == nil && hide == nil && show == nil {
+ fm = true // Missing focus implies a match
+ return
+ }
+ focusOrIgnore := make(map[uint64]bool)
+ hidden := make(map[uint64]bool)
+ for _, l := range p.Location {
+ if ignore != nil && l.matchesName(ignore) {
+ im = true
+ focusOrIgnore[l.ID] = false
+ } else if focus == nil || l.matchesName(focus) {
+ fm = true
+ focusOrIgnore[l.ID] = true
+ }
+
+ if hide != nil && l.matchesName(hide) {
+ hm = true
+ l.Line = l.unmatchedLines(hide)
+ if len(l.Line) == 0 {
+ hidden[l.ID] = true
+ }
+ }
+ if show != nil {
+ l.Line = l.matchedLines(show)
+ if len(l.Line) == 0 {
+ hidden[l.ID] = true
+ } else {
+ hnm = true
+ }
+ }
+ }
+
+ s := make([]*Sample, 0, len(p.Sample))
+ for _, sample := range p.Sample {
+ if focusedAndNotIgnored(sample.Location, focusOrIgnore) {
+ if len(hidden) > 0 {
+ var locs []*Location
+ for _, loc := range sample.Location {
+ if !hidden[loc.ID] {
+ locs = append(locs, loc)
+ }
+ }
+ if len(locs) == 0 {
+ // Remove sample with no locations (by not adding it to s).
+ continue
+ }
+ sample.Location = locs
+ }
+ s = append(s, sample)
+ }
+ }
+ p.Sample = s
+
+ return
+}
+
+// ShowFrom drops all stack frames above the highest matching frame and returns
+// whether a match was found. If showFrom is nil it returns false and does not
+// modify the profile.
+//
+// Example: consider a sample with frames [A, B, C, B], where A is the root.
+// ShowFrom(nil) returns false and has frames [A, B, C, B].
+// ShowFrom(A) returns true and has frames [A, B, C, B].
+// ShowFrom(B) returns true and has frames [B, C, B].
+// ShowFrom(C) returns true and has frames [C, B].
+// ShowFrom(D) returns false and drops the sample because no frames remain.
+func (p *Profile) ShowFrom(showFrom *regexp.Regexp) (matched bool) {
+ if showFrom == nil {
+ return false
+ }
+ // showFromLocs stores location IDs that matched ShowFrom.
+ showFromLocs := make(map[uint64]bool)
+ // Apply to locations.
+ for _, loc := range p.Location {
+ if filterShowFromLocation(loc, showFrom) {
+ showFromLocs[loc.ID] = true
+ matched = true
+ }
+ }
+ // For all samples, strip locations after the highest matching one.
+ s := make([]*Sample, 0, len(p.Sample))
+ for _, sample := range p.Sample {
+ for i := len(sample.Location) - 1; i >= 0; i-- {
+ if showFromLocs[sample.Location[i].ID] {
+ sample.Location = sample.Location[:i+1]
+ s = append(s, sample)
+ break
+ }
+ }
+ }
+ p.Sample = s
+ return matched
+}
+
+// filterShowFromLocation tests a showFrom regex against a location, removes
+// lines after the last match and returns whether a match was found. If the
+// mapping is matched, then all lines are kept.
+func filterShowFromLocation(loc *Location, showFrom *regexp.Regexp) bool {
+ if m := loc.Mapping; m != nil && showFrom.MatchString(m.File) {
+ return true
+ }
+ if i := loc.lastMatchedLineIndex(showFrom); i >= 0 {
+ loc.Line = loc.Line[:i+1]
+ return true
+ }
+ return false
+}
+
+// lastMatchedLineIndex returns the index of the last line that matches a regex,
+// or -1 if no match is found.
+func (loc *Location) lastMatchedLineIndex(re *regexp.Regexp) int {
+ for i := len(loc.Line) - 1; i >= 0; i-- {
+ if fn := loc.Line[i].Function; fn != nil {
+ if re.MatchString(fn.Name) || re.MatchString(fn.Filename) {
+ return i
+ }
+ }
+ }
+ return -1
+}
+
+// FilterTagsByName filters the tags in a profile and only keeps
+// tags that match show and not hide.
+func (p *Profile) FilterTagsByName(show, hide *regexp.Regexp) (sm, hm bool) {
+ matchRemove := func(name string) bool {
+ matchShow := show == nil || show.MatchString(name)
+ matchHide := hide != nil && hide.MatchString(name)
+
+ if matchShow {
+ sm = true
+ }
+ if matchHide {
+ hm = true
+ }
+ return !matchShow || matchHide
+ }
+ for _, s := range p.Sample {
+ for lab := range s.Label {
+ if matchRemove(lab) {
+ delete(s.Label, lab)
+ }
+ }
+ for lab := range s.NumLabel {
+ if matchRemove(lab) {
+ delete(s.NumLabel, lab)
+ }
+ }
+ }
+ return
+}
+
+// matchesName returns whether the location matches the regular
+// expression. It checks any available function names, file names, and
+// mapping object filename.
+func (loc *Location) matchesName(re *regexp.Regexp) bool {
+ for _, ln := range loc.Line {
+ if fn := ln.Function; fn != nil {
+ if re.MatchString(fn.Name) || re.MatchString(fn.Filename) {
+ return true
+ }
+ }
+ }
+ if m := loc.Mapping; m != nil && re.MatchString(m.File) {
+ return true
+ }
+ return false
+}
+
+// unmatchedLines returns the lines in the location that do not match
+// the regular expression.
+func (loc *Location) unmatchedLines(re *regexp.Regexp) []Line {
+ if m := loc.Mapping; m != nil && re.MatchString(m.File) {
+ return nil
+ }
+ var lines []Line
+ for _, ln := range loc.Line {
+ if fn := ln.Function; fn != nil {
+ if re.MatchString(fn.Name) || re.MatchString(fn.Filename) {
+ continue
+ }
+ }
+ lines = append(lines, ln)
+ }
+ return lines
+}
+
+// matchedLines returns the lines in the location that match
+// the regular expression.
+func (loc *Location) matchedLines(re *regexp.Regexp) []Line {
+ if m := loc.Mapping; m != nil && re.MatchString(m.File) {
+ return loc.Line
+ }
+ var lines []Line
+ for _, ln := range loc.Line {
+ if fn := ln.Function; fn != nil {
+ if !re.MatchString(fn.Name) && !re.MatchString(fn.Filename) {
+ continue
+ }
+ }
+ lines = append(lines, ln)
+ }
+ return lines
+}
+
+// focusedAndNotIgnored looks up a slice of ids against a map of
+// focused/ignored locations. The map only contains locations that are
+// explicitly focused or ignored. Returns whether there is at least
+// one focused location but no ignored locations.
+func focusedAndNotIgnored(locs []*Location, m map[uint64]bool) bool {
+ var f bool
+ for _, loc := range locs {
+ if focus, focusOrIgnore := m[loc.ID]; focusOrIgnore {
+ if focus {
+ // Found focused location. Must keep searching in case there
+ // is an ignored one as well.
+ f = true
+ } else {
+ // Found ignored location. Can return false right away.
+ return false
+ }
+ }
+ }
+ return f
+}
+
+// TagMatch selects tags for filtering
+type TagMatch func(s *Sample) bool
+
+// FilterSamplesByTag removes all samples from the profile, except
+// those that match focus and do not match the ignore regular
+// expression.
+func (p *Profile) FilterSamplesByTag(focus, ignore TagMatch) (fm, im bool) {
+ samples := make([]*Sample, 0, len(p.Sample))
+ for _, s := range p.Sample {
+ focused, ignored := true, false
+ if focus != nil {
+ focused = focus(s)
+ }
+ if ignore != nil {
+ ignored = ignore(s)
+ }
+ fm = fm || focused
+ im = im || ignored
+ if focused && !ignored {
+ samples = append(samples, s)
+ }
+ }
+ p.Sample = samples
+ return
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/profile/index.go b/src/cmd/vendor/github.com/google/pprof/profile/index.go
new file mode 100644
index 0000000..bef1d60
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/profile/index.go
@@ -0,0 +1,64 @@
+// Copyright 2016 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package profile
+
+import (
+ "fmt"
+ "strconv"
+ "strings"
+)
+
+// SampleIndexByName returns the appropriate index for a value of sample index.
+// If numeric, it returns the number, otherwise it looks up the text in the
+// profile sample types.
+func (p *Profile) SampleIndexByName(sampleIndex string) (int, error) {
+ if sampleIndex == "" {
+ if dst := p.DefaultSampleType; dst != "" {
+ for i, t := range sampleTypes(p) {
+ if t == dst {
+ return i, nil
+ }
+ }
+ }
+ // By default select the last sample value
+ return len(p.SampleType) - 1, nil
+ }
+ if i, err := strconv.Atoi(sampleIndex); err == nil {
+ if i < 0 || i >= len(p.SampleType) {
+ return 0, fmt.Errorf("sample_index %s is outside the range [0..%d]", sampleIndex, len(p.SampleType)-1)
+ }
+ return i, nil
+ }
+
+ // Remove the inuse_ prefix to support legacy pprof options
+ // "inuse_space" and "inuse_objects" for profiles containing types
+ // "space" and "objects".
+ noInuse := strings.TrimPrefix(sampleIndex, "inuse_")
+ for i, t := range p.SampleType {
+ if t.Type == sampleIndex || t.Type == noInuse {
+ return i, nil
+ }
+ }
+
+ return 0, fmt.Errorf("sample_index %q must be one of: %v", sampleIndex, sampleTypes(p))
+}
+
+func sampleTypes(p *Profile) []string {
+ types := make([]string, len(p.SampleType))
+ for i, t := range p.SampleType {
+ types[i] = t.Type
+ }
+ return types
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/profile/legacy_java_profile.go b/src/cmd/vendor/github.com/google/pprof/profile/legacy_java_profile.go
new file mode 100644
index 0000000..91f45e5
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/profile/legacy_java_profile.go
@@ -0,0 +1,315 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// This file implements parsers to convert java legacy profiles into
+// the profile.proto format.
+
+package profile
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "path/filepath"
+ "regexp"
+ "strconv"
+ "strings"
+)
+
+var (
+ attributeRx = regexp.MustCompile(`([\w ]+)=([\w ]+)`)
+ javaSampleRx = regexp.MustCompile(` *(\d+) +(\d+) +@ +([ x0-9a-f]*)`)
+ javaLocationRx = regexp.MustCompile(`^\s*0x([[:xdigit:]]+)\s+(.*)\s*$`)
+ javaLocationFileLineRx = regexp.MustCompile(`^(.*)\s+\((.+):(-?[[:digit:]]+)\)$`)
+ javaLocationPathRx = regexp.MustCompile(`^(.*)\s+\((.*)\)$`)
+)
+
+// javaCPUProfile returns a new Profile from profilez data.
+// b is the profile bytes after the header, period is the profiling
+// period, and parse is a function to parse 8-byte chunks from the
+// profile in its native endianness.
+func javaCPUProfile(b []byte, period int64, parse func(b []byte) (uint64, []byte)) (*Profile, error) {
+ p := &Profile{
+ Period: period * 1000,
+ PeriodType: &ValueType{Type: "cpu", Unit: "nanoseconds"},
+ SampleType: []*ValueType{{Type: "samples", Unit: "count"}, {Type: "cpu", Unit: "nanoseconds"}},
+ }
+ var err error
+ var locs map[uint64]*Location
+ if b, locs, err = parseCPUSamples(b, parse, false, p); err != nil {
+ return nil, err
+ }
+
+ if err = parseJavaLocations(b, locs, p); err != nil {
+ return nil, err
+ }
+
+ // Strip out addresses for better merge.
+ if err = p.Aggregate(true, true, true, true, false); err != nil {
+ return nil, err
+ }
+
+ return p, nil
+}
+
+// parseJavaProfile returns a new profile from heapz or contentionz
+// data. b is the profile bytes after the header.
+func parseJavaProfile(b []byte) (*Profile, error) {
+ h := bytes.SplitAfterN(b, []byte("\n"), 2)
+ if len(h) < 2 {
+ return nil, errUnrecognized
+ }
+
+ p := &Profile{
+ PeriodType: &ValueType{},
+ }
+ header := string(bytes.TrimSpace(h[0]))
+
+ var err error
+ var pType string
+ switch header {
+ case "--- heapz 1 ---":
+ pType = "heap"
+ case "--- contentionz 1 ---":
+ pType = "contention"
+ default:
+ return nil, errUnrecognized
+ }
+
+ if b, err = parseJavaHeader(pType, h[1], p); err != nil {
+ return nil, err
+ }
+ var locs map[uint64]*Location
+ if b, locs, err = parseJavaSamples(pType, b, p); err != nil {
+ return nil, err
+ }
+ if err = parseJavaLocations(b, locs, p); err != nil {
+ return nil, err
+ }
+
+ // Strip out addresses for better merge.
+ if err = p.Aggregate(true, true, true, true, false); err != nil {
+ return nil, err
+ }
+
+ return p, nil
+}
+
+// parseJavaHeader parses the attribute section on a java profile and
+// populates a profile. Returns the remainder of the buffer after all
+// attributes.
+func parseJavaHeader(pType string, b []byte, p *Profile) ([]byte, error) {
+ nextNewLine := bytes.IndexByte(b, byte('\n'))
+ for nextNewLine != -1 {
+ line := string(bytes.TrimSpace(b[0:nextNewLine]))
+ if line != "" {
+ h := attributeRx.FindStringSubmatch(line)
+ if h == nil {
+ // Not a valid attribute, exit.
+ return b, nil
+ }
+
+ attribute, value := strings.TrimSpace(h[1]), strings.TrimSpace(h[2])
+ var err error
+ switch pType + "/" + attribute {
+ case "heap/format", "cpu/format", "contention/format":
+ if value != "java" {
+ return nil, errUnrecognized
+ }
+ case "heap/resolution":
+ p.SampleType = []*ValueType{
+ {Type: "inuse_objects", Unit: "count"},
+ {Type: "inuse_space", Unit: value},
+ }
+ case "contention/resolution":
+ p.SampleType = []*ValueType{
+ {Type: "contentions", Unit: "count"},
+ {Type: "delay", Unit: value},
+ }
+ case "contention/sampling period":
+ p.PeriodType = &ValueType{
+ Type: "contentions", Unit: "count",
+ }
+ if p.Period, err = strconv.ParseInt(value, 0, 64); err != nil {
+ return nil, fmt.Errorf("failed to parse attribute %s: %v", line, err)
+ }
+ case "contention/ms since reset":
+ millis, err := strconv.ParseInt(value, 0, 64)
+ if err != nil {
+ return nil, fmt.Errorf("failed to parse attribute %s: %v", line, err)
+ }
+ p.DurationNanos = millis * 1000 * 1000
+ default:
+ return nil, errUnrecognized
+ }
+ }
+ // Grab next line.
+ b = b[nextNewLine+1:]
+ nextNewLine = bytes.IndexByte(b, byte('\n'))
+ }
+ return b, nil
+}
+
+// parseJavaSamples parses the samples from a java profile and
+// populates the Samples in a profile. Returns the remainder of the
+// buffer after the samples.
+func parseJavaSamples(pType string, b []byte, p *Profile) ([]byte, map[uint64]*Location, error) {
+ nextNewLine := bytes.IndexByte(b, byte('\n'))
+ locs := make(map[uint64]*Location)
+ for nextNewLine != -1 {
+ line := string(bytes.TrimSpace(b[0:nextNewLine]))
+ if line != "" {
+ sample := javaSampleRx.FindStringSubmatch(line)
+ if sample == nil {
+ // Not a valid sample, exit.
+ return b, locs, nil
+ }
+
+ // Java profiles have data/fields inverted compared to other
+ // profile types.
+ var err error
+ value1, value2, value3 := sample[2], sample[1], sample[3]
+ addrs, err := parseHexAddresses(value3)
+ if err != nil {
+ return nil, nil, fmt.Errorf("malformed sample: %s: %v", line, err)
+ }
+
+ var sloc []*Location
+ for _, addr := range addrs {
+ loc := locs[addr]
+ if locs[addr] == nil {
+ loc = &Location{
+ Address: addr,
+ }
+ p.Location = append(p.Location, loc)
+ locs[addr] = loc
+ }
+ sloc = append(sloc, loc)
+ }
+ s := &Sample{
+ Value: make([]int64, 2),
+ Location: sloc,
+ }
+
+ if s.Value[0], err = strconv.ParseInt(value1, 0, 64); err != nil {
+ return nil, nil, fmt.Errorf("parsing sample %s: %v", line, err)
+ }
+ if s.Value[1], err = strconv.ParseInt(value2, 0, 64); err != nil {
+ return nil, nil, fmt.Errorf("parsing sample %s: %v", line, err)
+ }
+
+ switch pType {
+ case "heap":
+ const javaHeapzSamplingRate = 524288 // 512K
+ if s.Value[0] == 0 {
+ return nil, nil, fmt.Errorf("parsing sample %s: second value must be non-zero", line)
+ }
+ s.NumLabel = map[string][]int64{"bytes": {s.Value[1] / s.Value[0]}}
+ s.Value[0], s.Value[1] = scaleHeapSample(s.Value[0], s.Value[1], javaHeapzSamplingRate)
+ case "contention":
+ if period := p.Period; period != 0 {
+ s.Value[0] = s.Value[0] * p.Period
+ s.Value[1] = s.Value[1] * p.Period
+ }
+ }
+ p.Sample = append(p.Sample, s)
+ }
+ // Grab next line.
+ b = b[nextNewLine+1:]
+ nextNewLine = bytes.IndexByte(b, byte('\n'))
+ }
+ return b, locs, nil
+}
+
+// parseJavaLocations parses the location information in a java
+// profile and populates the Locations in a profile. It uses the
+// location addresses from the profile as the ID of each
+// location.
+func parseJavaLocations(b []byte, locs map[uint64]*Location, p *Profile) error {
+ r := bytes.NewBuffer(b)
+ fns := make(map[string]*Function)
+ for {
+ line, err := r.ReadString('\n')
+ if err != nil {
+ if err != io.EOF {
+ return err
+ }
+ if line == "" {
+ break
+ }
+ }
+
+ if line = strings.TrimSpace(line); line == "" {
+ continue
+ }
+
+ jloc := javaLocationRx.FindStringSubmatch(line)
+ if len(jloc) != 3 {
+ continue
+ }
+ addr, err := strconv.ParseUint(jloc[1], 16, 64)
+ if err != nil {
+ return fmt.Errorf("parsing sample %s: %v", line, err)
+ }
+ loc := locs[addr]
+ if loc == nil {
+ // Unused/unseen
+ continue
+ }
+ var lineFunc, lineFile string
+ var lineNo int64
+
+ if fileLine := javaLocationFileLineRx.FindStringSubmatch(jloc[2]); len(fileLine) == 4 {
+ // Found a line of the form: "function (file:line)"
+ lineFunc, lineFile = fileLine[1], fileLine[2]
+ if n, err := strconv.ParseInt(fileLine[3], 10, 64); err == nil && n > 0 {
+ lineNo = n
+ }
+ } else if filePath := javaLocationPathRx.FindStringSubmatch(jloc[2]); len(filePath) == 3 {
+ // If there's not a file:line, it's a shared library path.
+ // The path isn't interesting, so just give the .so.
+ lineFunc, lineFile = filePath[1], filepath.Base(filePath[2])
+ } else if strings.Contains(jloc[2], "generated stub/JIT") {
+ lineFunc = "STUB"
+ } else {
+ // Treat whole line as the function name. This is used by the
+ // java agent for internal states such as "GC" or "VM".
+ lineFunc = jloc[2]
+ }
+ fn := fns[lineFunc]
+
+ if fn == nil {
+ fn = &Function{
+ Name: lineFunc,
+ SystemName: lineFunc,
+ Filename: lineFile,
+ }
+ fns[lineFunc] = fn
+ p.Function = append(p.Function, fn)
+ }
+ loc.Line = []Line{
+ {
+ Function: fn,
+ Line: lineNo,
+ },
+ }
+ loc.Address = 0
+ }
+
+ p.remapLocationIDs()
+ p.remapFunctionIDs()
+ p.remapMappingIDs()
+
+ return nil
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/profile/legacy_profile.go b/src/cmd/vendor/github.com/google/pprof/profile/legacy_profile.go
new file mode 100644
index 0000000..8d07fd6
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/profile/legacy_profile.go
@@ -0,0 +1,1228 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// This file implements parsers to convert legacy profiles into the
+// profile.proto format.
+
+package profile
+
+import (
+ "bufio"
+ "bytes"
+ "fmt"
+ "io"
+ "math"
+ "regexp"
+ "strconv"
+ "strings"
+)
+
+var (
+ countStartRE = regexp.MustCompile(`\A(\S+) profile: total \d+\z`)
+ countRE = regexp.MustCompile(`\A(\d+) @(( 0x[0-9a-f]+)+)\z`)
+
+ heapHeaderRE = regexp.MustCompile(`heap profile: *(\d+): *(\d+) *\[ *(\d+): *(\d+) *\] *@ *(heap[_a-z0-9]*)/?(\d*)`)
+ heapSampleRE = regexp.MustCompile(`(-?\d+): *(-?\d+) *\[ *(\d+): *(\d+) *] @([ x0-9a-f]*)`)
+
+ contentionSampleRE = regexp.MustCompile(`(\d+) *(\d+) @([ x0-9a-f]*)`)
+
+ hexNumberRE = regexp.MustCompile(`0x[0-9a-f]+`)
+
+ growthHeaderRE = regexp.MustCompile(`heap profile: *(\d+): *(\d+) *\[ *(\d+): *(\d+) *\] @ growthz?`)
+
+ fragmentationHeaderRE = regexp.MustCompile(`heap profile: *(\d+): *(\d+) *\[ *(\d+): *(\d+) *\] @ fragmentationz?`)
+
+ threadzStartRE = regexp.MustCompile(`--- threadz \d+ ---`)
+ threadStartRE = regexp.MustCompile(`--- Thread ([[:xdigit:]]+) \(name: (.*)/(\d+)\) stack: ---`)
+
+ // Regular expressions to parse process mappings. Support the format used by Linux /proc/.../maps and other tools.
+ // Recommended format:
+ // Start End object file name offset(optional) linker build id
+ // 0x40000-0x80000 /path/to/binary (@FF00) abc123456
+ spaceDigits = `\s+[[:digit:]]+`
+ hexPair = `\s+[[:xdigit:]]+:[[:xdigit:]]+`
+ oSpace = `\s*`
+ // Capturing expressions.
+ cHex = `(?:0x)?([[:xdigit:]]+)`
+ cHexRange = `\s*` + cHex + `[\s-]?` + oSpace + cHex + `:?`
+ cSpaceString = `(?:\s+(\S+))?`
+ cSpaceHex = `(?:\s+([[:xdigit:]]+))?`
+ cSpaceAtOffset = `(?:\s+\(@([[:xdigit:]]+)\))?`
+ cPerm = `(?:\s+([-rwxp]+))?`
+
+ procMapsRE = regexp.MustCompile(`^` + cHexRange + cPerm + cSpaceHex + hexPair + spaceDigits + cSpaceString)
+ briefMapsRE = regexp.MustCompile(`^` + cHexRange + cPerm + cSpaceString + cSpaceAtOffset + cSpaceHex)
+
+ // Regular expression to parse log data, of the form:
+ // ... file:line] msg...
+ logInfoRE = regexp.MustCompile(`^[^\[\]]+:[0-9]+]\s`)
+)
+
+func isSpaceOrComment(line string) bool {
+ trimmed := strings.TrimSpace(line)
+ return len(trimmed) == 0 || trimmed[0] == '#'
+}
+
+// parseGoCount parses a Go count profile (e.g., threadcreate or
+// goroutine) and returns a new Profile.
+func parseGoCount(b []byte) (*Profile, error) {
+ s := bufio.NewScanner(bytes.NewBuffer(b))
+ // Skip comments at the beginning of the file.
+ for s.Scan() && isSpaceOrComment(s.Text()) {
+ }
+ if err := s.Err(); err != nil {
+ return nil, err
+ }
+ m := countStartRE.FindStringSubmatch(s.Text())
+ if m == nil {
+ return nil, errUnrecognized
+ }
+ profileType := m[1]
+ p := &Profile{
+ PeriodType: &ValueType{Type: profileType, Unit: "count"},
+ Period: 1,
+ SampleType: []*ValueType{{Type: profileType, Unit: "count"}},
+ }
+ locations := make(map[uint64]*Location)
+ for s.Scan() {
+ line := s.Text()
+ if isSpaceOrComment(line) {
+ continue
+ }
+ if strings.HasPrefix(line, "---") {
+ break
+ }
+ m := countRE.FindStringSubmatch(line)
+ if m == nil {
+ return nil, errMalformed
+ }
+ n, err := strconv.ParseInt(m[1], 0, 64)
+ if err != nil {
+ return nil, errMalformed
+ }
+ fields := strings.Fields(m[2])
+ locs := make([]*Location, 0, len(fields))
+ for _, stk := range fields {
+ addr, err := strconv.ParseUint(stk, 0, 64)
+ if err != nil {
+ return nil, errMalformed
+ }
+ // Adjust all frames by -1 to land on top of the call instruction.
+ addr--
+ loc := locations[addr]
+ if loc == nil {
+ loc = &Location{
+ Address: addr,
+ }
+ locations[addr] = loc
+ p.Location = append(p.Location, loc)
+ }
+ locs = append(locs, loc)
+ }
+ p.Sample = append(p.Sample, &Sample{
+ Location: locs,
+ Value: []int64{n},
+ })
+ }
+ if err := s.Err(); err != nil {
+ return nil, err
+ }
+
+ if err := parseAdditionalSections(s, p); err != nil {
+ return nil, err
+ }
+ return p, nil
+}
+
+// remapLocationIDs ensures there is a location for each address
+// referenced by a sample, and remaps the samples to point to the new
+// location ids.
+func (p *Profile) remapLocationIDs() {
+ seen := make(map[*Location]bool, len(p.Location))
+ var locs []*Location
+
+ for _, s := range p.Sample {
+ for _, l := range s.Location {
+ if seen[l] {
+ continue
+ }
+ l.ID = uint64(len(locs) + 1)
+ locs = append(locs, l)
+ seen[l] = true
+ }
+ }
+ p.Location = locs
+}
+
+func (p *Profile) remapFunctionIDs() {
+ seen := make(map[*Function]bool, len(p.Function))
+ var fns []*Function
+
+ for _, l := range p.Location {
+ for _, ln := range l.Line {
+ fn := ln.Function
+ if fn == nil || seen[fn] {
+ continue
+ }
+ fn.ID = uint64(len(fns) + 1)
+ fns = append(fns, fn)
+ seen[fn] = true
+ }
+ }
+ p.Function = fns
+}
+
+// remapMappingIDs matches location addresses with existing mappings
+// and updates them appropriately. This is O(N*M), if this ever shows
+// up as a bottleneck, evaluate sorting the mappings and doing a
+// binary search, which would make it O(N*log(M)).
+func (p *Profile) remapMappingIDs() {
+ // Some profile handlers will incorrectly set regions for the main
+ // executable if its section is remapped. Fix them through heuristics.
+
+ if len(p.Mapping) > 0 {
+ // Remove the initial mapping if named '/anon_hugepage' and has a
+ // consecutive adjacent mapping.
+ if m := p.Mapping[0]; strings.HasPrefix(m.File, "/anon_hugepage") {
+ if len(p.Mapping) > 1 && m.Limit == p.Mapping[1].Start {
+ p.Mapping = p.Mapping[1:]
+ }
+ }
+ }
+
+ // Subtract the offset from the start of the main mapping if it
+ // ends up at a recognizable start address.
+ if len(p.Mapping) > 0 {
+ const expectedStart = 0x400000
+ if m := p.Mapping[0]; m.Start-m.Offset == expectedStart {
+ m.Start = expectedStart
+ m.Offset = 0
+ }
+ }
+
+ // Associate each location with an address to the corresponding
+ // mapping. Create fake mapping if a suitable one isn't found.
+ var fake *Mapping
+nextLocation:
+ for _, l := range p.Location {
+ a := l.Address
+ if l.Mapping != nil || a == 0 {
+ continue
+ }
+ for _, m := range p.Mapping {
+ if m.Start <= a && a < m.Limit {
+ l.Mapping = m
+ continue nextLocation
+ }
+ }
+ // Work around legacy handlers failing to encode the first
+ // part of mappings split into adjacent ranges.
+ for _, m := range p.Mapping {
+ if m.Offset != 0 && m.Start-m.Offset <= a && a < m.Start {
+ m.Start -= m.Offset
+ m.Offset = 0
+ l.Mapping = m
+ continue nextLocation
+ }
+ }
+ // If there is still no mapping, create a fake one.
+ // This is important for the Go legacy handler, which produced
+ // no mappings.
+ if fake == nil {
+ fake = &Mapping{
+ ID: 1,
+ Limit: ^uint64(0),
+ }
+ p.Mapping = append(p.Mapping, fake)
+ }
+ l.Mapping = fake
+ }
+
+ // Reset all mapping IDs.
+ for i, m := range p.Mapping {
+ m.ID = uint64(i + 1)
+ }
+}
+
+var cpuInts = []func([]byte) (uint64, []byte){
+ get32l,
+ get32b,
+ get64l,
+ get64b,
+}
+
+func get32l(b []byte) (uint64, []byte) {
+ if len(b) < 4 {
+ return 0, nil
+ }
+ return uint64(b[0]) | uint64(b[1])<<8 | uint64(b[2])<<16 | uint64(b[3])<<24, b[4:]
+}
+
+func get32b(b []byte) (uint64, []byte) {
+ if len(b) < 4 {
+ return 0, nil
+ }
+ return uint64(b[3]) | uint64(b[2])<<8 | uint64(b[1])<<16 | uint64(b[0])<<24, b[4:]
+}
+
+func get64l(b []byte) (uint64, []byte) {
+ if len(b) < 8 {
+ return 0, nil
+ }
+ return uint64(b[0]) | uint64(b[1])<<8 | uint64(b[2])<<16 | uint64(b[3])<<24 | uint64(b[4])<<32 | uint64(b[5])<<40 | uint64(b[6])<<48 | uint64(b[7])<<56, b[8:]
+}
+
+func get64b(b []byte) (uint64, []byte) {
+ if len(b) < 8 {
+ return 0, nil
+ }
+ return uint64(b[7]) | uint64(b[6])<<8 | uint64(b[5])<<16 | uint64(b[4])<<24 | uint64(b[3])<<32 | uint64(b[2])<<40 | uint64(b[1])<<48 | uint64(b[0])<<56, b[8:]
+}
+
+// parseCPU parses a profilez legacy profile and returns a newly
+// populated Profile.
+//
+// The general format for profilez samples is a sequence of words in
+// binary format. The first words are a header with the following data:
+//
+// 1st word -- 0
+// 2nd word -- 3
+// 3rd word -- 0 if a c++ application, 1 if a java application.
+// 4th word -- Sampling period (in microseconds).
+// 5th word -- Padding.
+func parseCPU(b []byte) (*Profile, error) {
+ var parse func([]byte) (uint64, []byte)
+ var n1, n2, n3, n4, n5 uint64
+ for _, parse = range cpuInts {
+ var tmp []byte
+ n1, tmp = parse(b)
+ n2, tmp = parse(tmp)
+ n3, tmp = parse(tmp)
+ n4, tmp = parse(tmp)
+ n5, tmp = parse(tmp)
+
+ if tmp != nil && n1 == 0 && n2 == 3 && n3 == 0 && n4 > 0 && n5 == 0 {
+ b = tmp
+ return cpuProfile(b, int64(n4), parse)
+ }
+ if tmp != nil && n1 == 0 && n2 == 3 && n3 == 1 && n4 > 0 && n5 == 0 {
+ b = tmp
+ return javaCPUProfile(b, int64(n4), parse)
+ }
+ }
+ return nil, errUnrecognized
+}
+
+// cpuProfile returns a new Profile from C++ profilez data.
+// b is the profile bytes after the header, period is the profiling
+// period, and parse is a function to parse 8-byte chunks from the
+// profile in its native endianness.
+func cpuProfile(b []byte, period int64, parse func(b []byte) (uint64, []byte)) (*Profile, error) {
+ p := &Profile{
+ Period: period * 1000,
+ PeriodType: &ValueType{Type: "cpu", Unit: "nanoseconds"},
+ SampleType: []*ValueType{
+ {Type: "samples", Unit: "count"},
+ {Type: "cpu", Unit: "nanoseconds"},
+ },
+ }
+ var err error
+ if b, _, err = parseCPUSamples(b, parse, true, p); err != nil {
+ return nil, err
+ }
+
+ // If *most* samples have the same second-to-the-bottom frame, it
+ // strongly suggests that it is an uninteresting artifact of
+ // measurement -- a stack frame pushed by the signal handler. The
+ // bottom frame is always correct as it is picked up from the signal
+ // structure, not the stack. Check if this is the case and if so,
+ // remove.
+
+ // Remove up to two frames.
+ maxiter := 2
+ // Allow one different sample for this many samples with the same
+ // second-to-last frame.
+ similarSamples := 32
+ margin := len(p.Sample) / similarSamples
+
+ for iter := 0; iter < maxiter; iter++ {
+ addr1 := make(map[uint64]int)
+ for _, s := range p.Sample {
+ if len(s.Location) > 1 {
+ a := s.Location[1].Address
+ addr1[a] = addr1[a] + 1
+ }
+ }
+
+ for id1, count := range addr1 {
+ if count >= len(p.Sample)-margin {
+ // Found uninteresting frame, strip it out from all samples
+ for _, s := range p.Sample {
+ if len(s.Location) > 1 && s.Location[1].Address == id1 {
+ s.Location = append(s.Location[:1], s.Location[2:]...)
+ }
+ }
+ break
+ }
+ }
+ }
+
+ if err := p.ParseMemoryMap(bytes.NewBuffer(b)); err != nil {
+ return nil, err
+ }
+
+ cleanupDuplicateLocations(p)
+ return p, nil
+}
+
+func cleanupDuplicateLocations(p *Profile) {
+ // The profile handler may duplicate the leaf frame, because it gets
+ // its address both from stack unwinding and from the signal
+ // context. Detect this and delete the duplicate, which has been
+ // adjusted by -1. The leaf address should not be adjusted as it is
+ // not a call.
+ for _, s := range p.Sample {
+ if len(s.Location) > 1 && s.Location[0].Address == s.Location[1].Address+1 {
+ s.Location = append(s.Location[:1], s.Location[2:]...)
+ }
+ }
+}
+
// parseCPUSamples parses a collection of profilez samples from a
// profile.
//
// profilez samples are a repeated sequence of stack frames of the
// form:
//
//	1st word -- The number of times this stack was encountered.
//	2nd word -- The size of the stack (StackSize).
//	3rd word -- The first address on the stack.
//	...
//	StackSize + 2 -- The last address on the stack
//
// The last stack trace is of the form:
//
//	1st word -- 0
//	2nd word -- 1
//	3rd word -- 0
//
// Addresses from stack traces may point to the next instruction after
// each call. Optionally adjust by -1 to land somewhere on the actual
// call (except for the leaf, which is not a call).
func parseCPUSamples(b []byte, parse func(b []byte) (uint64, []byte), adjust bool, p *Profile) ([]byte, map[uint64]*Location, error) {
	locs := make(map[uint64]*Location)
	for len(b) > 0 {
		var count, nstk uint64
		count, b = parse(b)
		nstk, b = parse(b)
		// A truncated header, or a stack depth larger than the number of
		// words remaining, means the input is not a valid sample stream.
		if b == nil || nstk > uint64(len(b)/4) {
			return nil, nil, errUnrecognized
		}
		var sloc []*Location
		addrs := make([]uint64, nstk)
		for i := 0; i < int(nstk); i++ {
			addrs[i], b = parse(b)
		}

		if count == 0 && nstk == 1 && addrs[0] == 0 {
			// End of data marker
			break
		}
		for i, addr := range addrs {
			// Adjust all frames except the leaf (i == 0), which is not a call.
			if adjust && i > 0 {
				addr--
			}
			// Intern locations by address so repeated frames share one Location.
			loc := locs[addr]
			if loc == nil {
				loc = &Location{
					Address: addr,
				}
				locs[addr] = loc
				p.Location = append(p.Location, loc)
			}
			sloc = append(sloc, loc)
		}
		// Record both the raw count and the count scaled by the sampling
		// period (the latter estimates time spent).
		p.Sample = append(p.Sample,
			&Sample{
				Value:    []int64{int64(count), int64(count) * p.Period},
				Location: sloc,
			})
	}
	// Return any bytes following the samples (all remaining bytes if the
	// end-of-data marker was never seen) plus the interned locations.
	return b, locs, nil
}
+
// parseHeap parses a heapz legacy or a growthz profile and
// returns a newly populated Profile.
func parseHeap(b []byte) (p *Profile, err error) {
	s := bufio.NewScanner(bytes.NewBuffer(b))
	if !s.Scan() {
		if err := s.Err(); err != nil {
			return nil, err
		}
		return nil, errUnrecognized
	}
	p = &Profile{}

	sampling := ""
	hasAlloc := false

	line := s.Text()
	p.PeriodType = &ValueType{Type: "space", Unit: "bytes"}
	// Identify the profile flavor from the first line: heapz headers
	// carry sampling parameters, while growth and fragmentation
	// profiles are unsampled (period 1).
	if header := heapHeaderRE.FindStringSubmatch(line); header != nil {
		sampling, p.Period, hasAlloc, err = parseHeapHeader(line)
		if err != nil {
			return nil, err
		}
	} else if header = growthHeaderRE.FindStringSubmatch(line); header != nil {
		p.Period = 1
	} else if header = fragmentationHeaderRE.FindStringSubmatch(line); header != nil {
		p.Period = 1
	} else {
		return nil, errUnrecognized
	}

	if hasAlloc {
		// Put alloc before inuse so that default pprof selection
		// will prefer inuse_space.
		p.SampleType = []*ValueType{
			{Type: "alloc_objects", Unit: "count"},
			{Type: "alloc_space", Unit: "bytes"},
			{Type: "inuse_objects", Unit: "count"},
			{Type: "inuse_space", Unit: "bytes"},
		}
	} else {
		p.SampleType = []*ValueType{
			{Type: "objects", Unit: "count"},
			{Type: "space", Unit: "bytes"},
		}
	}

	locs := make(map[uint64]*Location)
	for s.Scan() {
		line := strings.TrimSpace(s.Text())

		if isSpaceOrComment(line) {
			continue
		}

		// The sample section ends at the memory map sentinel.
		if isMemoryMapSentinel(line) {
			break
		}

		value, blocksize, addrs, err := parseHeapSample(line, p.Period, sampling, hasAlloc)
		if err != nil {
			return nil, err
		}

		var sloc []*Location
		for _, addr := range addrs {
			// Addresses from stack traces point to the next instruction after
			// each call. Adjust by -1 to land somewhere on the actual call.
			addr--
			loc := locs[addr]
			if locs[addr] == nil {
				loc = &Location{
					Address: addr,
				}
				p.Location = append(p.Location, loc)
				locs[addr] = loc
			}
			sloc = append(sloc, loc)
		}

		p.Sample = append(p.Sample, &Sample{
			Value:    value,
			Location: sloc,
			NumLabel: map[string][]int64{"bytes": {blocksize}},
		})
	}
	if err := s.Err(); err != nil {
		return nil, err
	}
	// Consume any trailing sections, including the memory map.
	if err := parseAdditionalSections(s, p); err != nil {
		return nil, err
	}
	return p, nil
}
+
+func parseHeapHeader(line string) (sampling string, period int64, hasAlloc bool, err error) {
+ header := heapHeaderRE.FindStringSubmatch(line)
+ if header == nil {
+ return "", 0, false, errUnrecognized
+ }
+
+ if len(header[6]) > 0 {
+ if period, err = strconv.ParseInt(header[6], 10, 64); err != nil {
+ return "", 0, false, errUnrecognized
+ }
+ }
+
+ if (header[3] != header[1] && header[3] != "0") || (header[4] != header[2] && header[4] != "0") {
+ hasAlloc = true
+ }
+
+ switch header[5] {
+ case "heapz_v2", "heap_v2":
+ return "v2", period, hasAlloc, nil
+ case "heapprofile":
+ return "", 1, hasAlloc, nil
+ case "heap":
+ return "v2", period / 2, hasAlloc, nil
+ default:
+ return "", 0, false, errUnrecognized
+ }
+}
+
// parseHeapSample parses a single row from a heap profile into a new Sample.
//
// rate is the sampling period, sampling selects the scaling scheme
// ("v2" enables probabilistic unsampling via scaleHeapSample), and
// includeAlloc controls whether the allocation columns are emitted in
// addition to the inuse columns. It returns the sample values, the
// average block size, and the raw stack addresses.
func parseHeapSample(line string, rate int64, sampling string, includeAlloc bool) (value []int64, blocksize int64, addrs []uint64, err error) {
	sampleData := heapSampleRE.FindStringSubmatch(line)
	if len(sampleData) != 6 {
		return nil, 0, nil, fmt.Errorf("unexpected number of sample values: got %d, want 6", len(sampleData))
	}

	// This is a local-scoped helper function to avoid needing to pass
	// around rate, sampling and many return parameters.
	addValues := func(countString, sizeString string, label string) error {
		count, err := strconv.ParseInt(countString, 10, 64)
		if err != nil {
			return fmt.Errorf("malformed sample: %s: %v", line, err)
		}
		size, err := strconv.ParseInt(sizeString, 10, 64)
		if err != nil {
			return fmt.Errorf("malformed sample: %s: %v", line, err)
		}
		if count == 0 && size != 0 {
			return fmt.Errorf("%s count was 0 but %s bytes was %d", label, label, size)
		}
		if count != 0 {
			// Record the average block size before unsampling, since
			// scaleHeapSample scales count and size together.
			blocksize = size / count
			if sampling == "v2" {
				count, size = scaleHeapSample(count, size, rate)
			}
		}
		value = append(value, count, size)
		return nil
	}

	// Alloc values (submatches 3 and 4) come first, matching the
	// SampleType ordering established by parseHeap.
	if includeAlloc {
		if err := addValues(sampleData[3], sampleData[4], "allocation"); err != nil {
			return nil, 0, nil, err
		}
	}

	if err := addValues(sampleData[1], sampleData[2], "inuse"); err != nil {
		return nil, 0, nil, err
	}

	addrs, err = parseHexAddresses(sampleData[5])
	if err != nil {
		return nil, 0, nil, fmt.Errorf("malformed sample: %s: %v", line, err)
	}

	return value, blocksize, addrs, nil
}
+
+// parseHexAddresses extracts hex numbers from a string, attempts to convert
+// each to an unsigned 64-bit number and returns the resulting numbers as a
+// slice, or an error if the string contains hex numbers which are too large to
+// handle (which means a malformed profile).
+func parseHexAddresses(s string) ([]uint64, error) {
+ hexStrings := hexNumberRE.FindAllString(s, -1)
+ var addrs []uint64
+ for _, s := range hexStrings {
+ if addr, err := strconv.ParseUint(s, 0, 64); err == nil {
+ addrs = append(addrs, addr)
+ } else {
+ return nil, fmt.Errorf("failed to parse as hex 64-bit number: %s", s)
+ }
+ }
+ return addrs, nil
+}
+
// scaleHeapSample corrects a heapz sample for its sampling
// probability, estimating the unsampled totals. heapz v2 profiles use
// a poisson sampling process: an allocation of size S is collected
// with probability 1-exp(-S/R) for average sampling rate R, so each
// collected sample is divided by that probability.
func scaleHeapSample(count, size, rate int64) (int64, int64) {
	switch {
	case count == 0 || size == 0:
		return 0, 0
	case rate <= 1:
		// rate==1 means every allocation was recorded, so no adjustment
		// is needed; rate<1 is treated as unknown and left unscaled.
		return count, size
	}

	avgSize := float64(size) / float64(count)
	scale := 1 / (1 - math.Exp(-avgSize/float64(rate)))
	return int64(scale * float64(count)), int64(scale * float64(size))
}
+
// parseContention parses a mutex or contention profile. There are 2 cases:
// "--- contentionz " for legacy C++ profiles (and backwards compatibility)
// "--- mutex:" or "--- contention:" for profiles generated by the Go runtime.
func parseContention(b []byte) (*Profile, error) {
	s := bufio.NewScanner(bytes.NewBuffer(b))
	if !s.Scan() {
		if err := s.Err(); err != nil {
			return nil, err
		}
		return nil, errUnrecognized
	}

	// Accept any of the known header lines; anything else is not a
	// contention profile.
	switch l := s.Text(); {
	case strings.HasPrefix(l, "--- contentionz "):
	case strings.HasPrefix(l, "--- mutex:"):
	case strings.HasPrefix(l, "--- contention:"):
	default:
		return nil, errUnrecognized
	}

	p := &Profile{
		PeriodType: &ValueType{Type: "contentions", Unit: "count"},
		Period:     1,
		SampleType: []*ValueType{
			{Type: "contentions", Unit: "count"},
			{Type: "delay", Unit: "nanoseconds"},
		},
	}

	var cpuHz int64
	// Parse text of the form "attribute = value" before the samples.
	const delimiter = "="
	for s.Scan() {
		line := s.Text()
		if line = strings.TrimSpace(line); isSpaceOrComment(line) {
			continue
		}
		if strings.HasPrefix(line, "---") {
			// A section divider ends the attribute block.
			break
		}
		attr := strings.SplitN(line, delimiter, 2)
		if len(attr) != 2 {
			// A line without "=" must be the first sample; stop here and
			// let the sample loop below process the current line.
			break
		}
		key, val := strings.TrimSpace(attr[0]), strings.TrimSpace(attr[1])
		var err error
		switch key {
		case "cycles/second":
			if cpuHz, err = strconv.ParseInt(val, 0, 64); err != nil {
				return nil, errUnrecognized
			}
		case "sampling period":
			if p.Period, err = strconv.ParseInt(val, 0, 64); err != nil {
				return nil, errUnrecognized
			}
		case "ms since reset":
			ms, err := strconv.ParseInt(val, 0, 64)
			if err != nil {
				return nil, errUnrecognized
			}
			p.DurationNanos = ms * 1000 * 1000
		case "format":
			// CPP contentionz profiles don't have format.
			return nil, errUnrecognized
		case "resolution":
			// CPP contentionz profiles don't have resolution.
			return nil, errUnrecognized
		case "discarded samples":
		default:
			return nil, errUnrecognized
		}
	}
	if err := s.Err(); err != nil {
		return nil, err
	}

	locs := make(map[uint64]*Location)
	// The scanner is still positioned on the line that terminated the
	// attribute loop, so examine it before scanning further.
	for {
		line := strings.TrimSpace(s.Text())
		if strings.HasPrefix(line, "---") {
			break
		}
		if !isSpaceOrComment(line) {
			value, addrs, err := parseContentionSample(line, p.Period, cpuHz)
			if err != nil {
				return nil, err
			}
			var sloc []*Location
			for _, addr := range addrs {
				// Addresses from stack traces point to the next instruction after
				// each call. Adjust by -1 to land somewhere on the actual call.
				addr--
				loc := locs[addr]
				if locs[addr] == nil {
					loc = &Location{
						Address: addr,
					}
					p.Location = append(p.Location, loc)
					locs[addr] = loc
				}
				sloc = append(sloc, loc)
			}
			p.Sample = append(p.Sample, &Sample{
				Value:    value,
				Location: sloc,
			})
		}
		if !s.Scan() {
			break
		}
	}
	if err := s.Err(); err != nil {
		return nil, err
	}

	// Consume any trailing sections, including the memory map.
	if err := parseAdditionalSections(s, p); err != nil {
		return nil, err
	}

	return p, nil
}
+
+// parseContentionSample parses a single row from a contention profile
+// into a new Sample.
+func parseContentionSample(line string, period, cpuHz int64) (value []int64, addrs []uint64, err error) {
+ sampleData := contentionSampleRE.FindStringSubmatch(line)
+ if sampleData == nil {
+ return nil, nil, errUnrecognized
+ }
+
+ v1, err := strconv.ParseInt(sampleData[1], 10, 64)
+ if err != nil {
+ return nil, nil, fmt.Errorf("malformed sample: %s: %v", line, err)
+ }
+ v2, err := strconv.ParseInt(sampleData[2], 10, 64)
+ if err != nil {
+ return nil, nil, fmt.Errorf("malformed sample: %s: %v", line, err)
+ }
+
+ // Unsample values if period and cpuHz are available.
+ // - Delays are scaled to cycles and then to nanoseconds.
+ // - Contentions are scaled to cycles.
+ if period > 0 {
+ if cpuHz > 0 {
+ cpuGHz := float64(cpuHz) / 1e9
+ v1 = int64(float64(v1) * float64(period) / cpuGHz)
+ }
+ v2 = v2 * period
+ }
+
+ value = []int64{v2, v1}
+ addrs, err = parseHexAddresses(sampleData[3])
+ if err != nil {
+ return nil, nil, fmt.Errorf("malformed sample: %s: %v", line, err)
+ }
+
+ return value, addrs, nil
+}
+
// parseThread parses a Threadz profile and returns a new Profile.
func parseThread(b []byte) (*Profile, error) {
	s := bufio.NewScanner(bytes.NewBuffer(b))
	// Skip past comments and empty lines seeking a real header.
	for s.Scan() && isSpaceOrComment(s.Text()) {
	}

	line := s.Text()
	if m := threadzStartRE.FindStringSubmatch(line); m != nil {
		// Advance over initial comments until first stack trace.
		for s.Scan() {
			if line = s.Text(); isMemoryMapSentinel(line) || strings.HasPrefix(line, "-") {
				break
			}
		}
	} else if t := threadStartRE.FindStringSubmatch(line); len(t) != 4 {
		// Neither a threadz section header nor a per-thread header.
		return nil, errUnrecognized
	}

	p := &Profile{
		SampleType: []*ValueType{{Type: "thread", Unit: "count"}},
		PeriodType: &ValueType{Type: "thread", Unit: "count"},
		Period:     1,
	}

	locs := make(map[uint64]*Location)
	// Recognize each thread and populate profile samples.
	for !isMemoryMapSentinel(line) {
		if strings.HasPrefix(line, "---- no stack trace for") {
			break
		}
		if t := threadStartRE.FindStringSubmatch(line); len(t) != 4 {
			return nil, errUnrecognized
		}

		var addrs []uint64
		var err error
		// parseThreadSample consumes the traceback and returns the first
		// line of the next section, keeping this loop aligned.
		line, addrs, err = parseThreadSample(s)
		if err != nil {
			return nil, err
		}
		if len(addrs) == 0 {
			// We got a --same as previous threads--. Bump counters.
			if len(p.Sample) > 0 {
				s := p.Sample[len(p.Sample)-1]
				s.Value[0]++
			}
			continue
		}

		var sloc []*Location
		for i, addr := range addrs {
			// Addresses from stack traces point to the next instruction after
			// each call. Adjust by -1 to land somewhere on the actual call
			// (except for the leaf, which is not a call).
			if i > 0 {
				addr--
			}
			loc := locs[addr]
			if locs[addr] == nil {
				loc = &Location{
					Address: addr,
				}
				p.Location = append(p.Location, loc)
				locs[addr] = loc
			}
			sloc = append(sloc, loc)
		}

		p.Sample = append(p.Sample, &Sample{
			Value:    []int64{1},
			Location: sloc,
		})
	}

	// Consume any trailing sections, including the memory map.
	if err := parseAdditionalSections(s, p); err != nil {
		return nil, err
	}

	// The leaf frame may have been recorded twice; drop the duplicate.
	cleanupDuplicateLocations(p)
	return p, nil
}
+
+// parseThreadSample parses a symbolized or unsymbolized stack trace.
+// Returns the first line after the traceback, the sample (or nil if
+// it hits a 'same-as-previous' marker) and an error.
+func parseThreadSample(s *bufio.Scanner) (nextl string, addrs []uint64, err error) {
+ var line string
+ sameAsPrevious := false
+ for s.Scan() {
+ line = strings.TrimSpace(s.Text())
+ if line == "" {
+ continue
+ }
+
+ if strings.HasPrefix(line, "---") {
+ break
+ }
+ if strings.Contains(line, "same as previous thread") {
+ sameAsPrevious = true
+ continue
+ }
+
+ curAddrs, err := parseHexAddresses(line)
+ if err != nil {
+ return "", nil, fmt.Errorf("malformed sample: %s: %v", line, err)
+ }
+ addrs = append(addrs, curAddrs...)
+ }
+ if err := s.Err(); err != nil {
+ return "", nil, err
+ }
+ if sameAsPrevious {
+ return line, nil, nil
+ }
+ return line, addrs, nil
+}
+
+// parseAdditionalSections parses any additional sections in the
+// profile, ignoring any unrecognized sections.
+func parseAdditionalSections(s *bufio.Scanner, p *Profile) error {
+ for !isMemoryMapSentinel(s.Text()) && s.Scan() {
+ }
+ if err := s.Err(); err != nil {
+ return err
+ }
+ return p.ParseMemoryMapFromScanner(s)
+}
+
+// ParseProcMaps parses a memory map in the format of /proc/self/maps.
+// ParseMemoryMap should be called after setting on a profile to
+// associate locations to the corresponding mapping based on their
+// address.
+func ParseProcMaps(rd io.Reader) ([]*Mapping, error) {
+ s := bufio.NewScanner(rd)
+ return parseProcMapsFromScanner(s)
+}
+
// parseProcMapsFromScanner reads mapping entries from a scanner over
// /proc/self/maps-style text. Lines of the form "attr=value" register
// substitutions: "$attr" in later lines is replaced by value. Other
// unrecognized lines are silently skipped.
func parseProcMapsFromScanner(s *bufio.Scanner) ([]*Mapping, error) {
	var mapping []*Mapping

	var attrs []string
	const delimiter = "="
	r := strings.NewReplacer()
	for s.Scan() {
		// Strip any glog prefix, then apply accumulated $attr substitutions.
		line := r.Replace(removeLoggingInfo(s.Text()))
		m, err := parseMappingEntry(line)
		if err != nil {
			if err == errUnrecognized {
				// Recognize assignments of the form: attr=value, and replace
				// $attr with value on subsequent mappings.
				if attr := strings.SplitN(line, delimiter, 2); len(attr) == 2 {
					attrs = append(attrs, "$"+strings.TrimSpace(attr[0]), strings.TrimSpace(attr[1]))
					r = strings.NewReplacer(attrs...)
				}
				// Ignore any unrecognized entries
				continue
			}
			return nil, err
		}
		if m == nil {
			// parseMappingEntry returns (nil, nil) for entries it
			// deliberately skips (non-executable mappings).
			continue
		}
		mapping = append(mapping, m)
	}
	if err := s.Err(); err != nil {
		return nil, err
	}
	return mapping, nil
}
+
+// removeLoggingInfo detects and removes log prefix entries generated
+// by the glog package. If no logging prefix is detected, the string
+// is returned unmodified.
+func removeLoggingInfo(line string) string {
+ if match := logInfoRE.FindStringIndex(line); match != nil {
+ return line[match[1]:]
+ }
+ return line
+}
+
+// ParseMemoryMap parses a memory map in the format of
+// /proc/self/maps, and overrides the mappings in the current profile.
+// It renumbers the samples and locations in the profile correspondingly.
+func (p *Profile) ParseMemoryMap(rd io.Reader) error {
+ return p.ParseMemoryMapFromScanner(bufio.NewScanner(rd))
+}
+
// ParseMemoryMapFromScanner parses a memory map in the format of
// /proc/self/maps or a variety of legacy format, and overrides the
// mappings in the current profile. It renumbers the samples and
// locations in the profile correspondingly.
func (p *Profile) ParseMemoryMapFromScanner(s *bufio.Scanner) error {
	mapping, err := parseProcMapsFromScanner(s)
	if err != nil {
		return err
	}
	p.Mapping = append(p.Mapping, mapping...)
	// Renumber the profile's entities now that mappings have changed.
	p.massageMappings()
	p.remapLocationIDs()
	p.remapFunctionIDs()
	p.remapMappingIDs()
	return nil
}
+
// parseMappingEntry parses one mapping line in either the
// /proc/self/maps format or the brief legacy format. It returns
// (nil, nil) for non-executable entries and errUnrecognized for lines
// matching neither format.
func parseMappingEntry(l string) (*Mapping, error) {
	var start, end, perm, file, offset, buildID string
	if me := procMapsRE.FindStringSubmatch(l); len(me) == 6 {
		start, end, perm, offset, file = me[1], me[2], me[3], me[4], me[5]
	} else if me := briefMapsRE.FindStringSubmatch(l); len(me) == 7 {
		start, end, perm, file, offset, buildID = me[1], me[2], me[3], me[4], me[5], me[6]
	} else {
		return nil, errUnrecognized
	}

	var err error
	mapping := &Mapping{
		File:    file,
		BuildID: buildID,
	}
	if perm != "" && !strings.Contains(perm, "x") {
		// Skip non-executable entries.
		return nil, nil
	}
	// Addresses and offsets are hexadecimal (base 16, no 0x prefix).
	if mapping.Start, err = strconv.ParseUint(start, 16, 64); err != nil {
		return nil, errUnrecognized
	}
	if mapping.Limit, err = strconv.ParseUint(end, 16, 64); err != nil {
		return nil, errUnrecognized
	}
	if offset != "" {
		if mapping.Offset, err = strconv.ParseUint(offset, 16, 64); err != nil {
			return nil, errUnrecognized
		}
	}
	return mapping, nil
}
+
// memoryMapSentinels are the markers that introduce memory map
// information in legacy profiles.
var memoryMapSentinels = []string{
	"--- Memory map: ---",
	"MAPPED_LIBRARIES:",
}
+
// isMemoryMapSentinel returns true if the string contains one of the
// known sentinels for memory map information. The match is a substring
// match, so extra text around the sentinel is tolerated.
func isMemoryMapSentinel(line string) bool {
	for _, s := range memoryMapSentinels {
		if strings.Contains(line, s) {
			return true
		}
	}
	return false
}
+
// addLegacyFrameInfo sets the profile's drop/keep frame regexps to
// hide profiler-internal frames, choosing the regexp set matching the
// profile's sample types (heap, contention, or CPU as the default).
func (p *Profile) addLegacyFrameInfo() {
	switch {
	case isProfileType(p, heapzSampleTypes):
		p.DropFrames, p.KeepFrames = allocRxStr, allocSkipRxStr
	case isProfileType(p, contentionzSampleTypes):
		p.DropFrames, p.KeepFrames = lockRxStr, ""
	default:
		p.DropFrames, p.KeepFrames = cpuProfilerRxStr, ""
	}
}
+
// heapzSampleTypes lists the SampleType name signatures that identify
// a profile as a heap profile.
var heapzSampleTypes = [][]string{
	{"allocations", "size"}, // early Go pprof profiles
	{"objects", "space"},
	{"inuse_objects", "inuse_space"},
	{"alloc_objects", "alloc_space"},
	{"alloc_objects", "alloc_space", "inuse_objects", "inuse_space"}, // Go pprof legacy profiles
}

// contentionzSampleTypes lists the SampleType name signatures that
// identify a profile as a contention/mutex profile.
var contentionzSampleTypes = [][]string{
	{"contentions", "delay"},
}
+
+func isProfileType(p *Profile, types [][]string) bool {
+ st := p.SampleType
+nextType:
+ for _, t := range types {
+ if len(st) != len(t) {
+ continue
+ }
+
+ for i := range st {
+ if st[i].Type != t[i] {
+ continue nextType
+ }
+ }
+ return true
+ }
+ return false
+}
+
// allocRxStr matches function names of well-known memory-allocation
// entry points; it is used as the DropFrames regexp for heap profiles
// so these frames are hidden by default.
var allocRxStr = strings.Join([]string{
	// POSIX entry points.
	`calloc`,
	`cfree`,
	`malloc`,
	`free`,
	`memalign`,
	`do_memalign`,
	`(__)?posix_memalign`,
	`pvalloc`,
	`valloc`,
	`realloc`,

	// TC malloc.
	`tcmalloc::.*`,
	`tc_calloc`,
	`tc_cfree`,
	`tc_malloc`,
	`tc_free`,
	`tc_memalign`,
	`tc_posix_memalign`,
	`tc_pvalloc`,
	`tc_valloc`,
	`tc_realloc`,
	`tc_new`,
	`tc_delete`,
	`tc_newarray`,
	`tc_deletearray`,
	`tc_new_nothrow`,
	`tc_newarray_nothrow`,

	// Memory-allocation routines on OS X.
	`malloc_zone_malloc`,
	`malloc_zone_calloc`,
	`malloc_zone_valloc`,
	`malloc_zone_realloc`,
	`malloc_zone_memalign`,
	`malloc_zone_free`,

	// Go runtime
	`runtime\..*`,

	// Other misc. memory allocation routines
	`BaseArena::.*`,
	`(::)?do_malloc_no_errno`,
	`(::)?do_malloc_pages`,
	`(::)?do_malloc`,
	`DoSampledAllocation`,
	`MallocedMemBlock::MallocedMemBlock`,
	`_M_allocate`,
	`__builtin_(vec_)?delete`,
	`__builtin_(vec_)?new`,
	`__gnu_cxx::new_allocator::allocate`,
	`__libc_malloc`,
	`__malloc_alloc_template::allocate`,
	`allocate`,
	`cpp_alloc`,
	`operator new(\[\])?`,
	`simple_alloc::allocate`,
}, `|`)
+
// allocSkipRxStr is the KeepFrames counterpart to allocRxStr: frames
// matching it are retained even if allocRxStr would drop them.
var allocSkipRxStr = strings.Join([]string{
	// Preserve Go runtime frames that appear in the middle/bottom of
	// the stack.
	`runtime\.panic`,
	`runtime\.reflectcall`,
	`runtime\.call[0-9]*`,
}, `|`)
+
// cpuProfilerRxStr matches CPU-profiler-internal frames (signal
// handlers and profiler bookkeeping) dropped from CPU profiles.
var cpuProfilerRxStr = strings.Join([]string{
	`ProfileData::Add`,
	`ProfileData::prof_handler`,
	`CpuProfiler::prof_handler`,
	`__pthread_sighandler`,
	`__restore`,
}, `|`)
+
// lockRxStr matches lock-profiling-internal frames (mutex/spinlock
// bookkeeping) dropped from contention profiles.
var lockRxStr = strings.Join([]string{
	`RecordLockProfileData`,
	`(base::)?RecordLockProfileData.*`,
	`(base::)?SubmitMutexProfileData.*`,
	`(base::)?SubmitSpinLockProfileData.*`,
	`(base::Mutex::)?AwaitCommon.*`,
	`(base::Mutex::)?Unlock.*`,
	`(base::Mutex::)?UnlockSlow.*`,
	`(base::Mutex::)?ReaderUnlock.*`,
	`(base::MutexLock::)?~MutexLock.*`,
	`(Mutex::)?AwaitCommon.*`,
	`(Mutex::)?Unlock.*`,
	`(Mutex::)?UnlockSlow.*`,
	`(Mutex::)?ReaderUnlock.*`,
	`(MutexLock::)?~MutexLock.*`,
	`(SpinLock::)?Unlock.*`,
	`(SpinLock::)?SlowUnlock.*`,
	`(SpinLockHolder::)?~SpinLockHolder.*`,
}, `|`)
diff --git a/src/cmd/vendor/github.com/google/pprof/profile/merge.go b/src/cmd/vendor/github.com/google/pprof/profile/merge.go
new file mode 100644
index 0000000..4b66282
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/profile/merge.go
@@ -0,0 +1,667 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package profile
+
+import (
+ "encoding/binary"
+ "fmt"
+ "sort"
+ "strconv"
+ "strings"
+)
+
+// Compact performs garbage collection on a profile to remove any
+// unreferenced fields. This is useful to reduce the size of a profile
+// after samples or locations have been removed.
+func (p *Profile) Compact() *Profile {
+ p, _ = Merge([]*Profile{p})
+ return p
+}
+
// Merge merges all the profiles in profs into a single Profile.
// Returns a new profile independent of the input profiles. The merged
// profile is compacted to eliminate unused samples, locations,
// functions and mappings. Profiles must have identical profile sample
// and period types or the merge will fail. profile.Period of the
// resulting profile will be the maximum of all profiles, and
// profile.TimeNanos will be the earliest nonzero one. Merges are
// associative with the caveat of the first profile having some
// specialization in how headers are combined. There may be other
// subtleties now or in the future regarding associativity.
func Merge(srcs []*Profile) (*Profile, error) {
	if len(srcs) == 0 {
		return nil, fmt.Errorf("no profiles to merge")
	}
	p, err := combineHeaders(srcs)
	if err != nil {
		return nil, err
	}

	// Entity tables are shared across all sources so equivalent
	// samples/locations/functions/mappings merge into one.
	pm := &profileMerger{
		p:         p,
		samples:   make(map[sampleKey]*Sample, len(srcs[0].Sample)),
		locations: make(map[locationKey]*Location, len(srcs[0].Location)),
		functions: make(map[functionKey]*Function, len(srcs[0].Function)),
		mappings:  make(map[mappingKey]*Mapping, len(srcs[0].Mapping)),
	}

	for _, src := range srcs {
		// Clear the profile-specific hash tables
		pm.locationsByID = makeLocationIDMap(len(src.Location))
		pm.functionsByID = make(map[uint64]*Function, len(src.Function))
		pm.mappingsByID = make(map[uint64]mapInfo, len(src.Mapping))

		if len(pm.mappings) == 0 && len(src.Mapping) > 0 {
			// The Mapping list has the property that the first mapping
			// represents the main binary. Take the first Mapping we see,
			// otherwise the operations below will add mappings in an
			// arbitrary order.
			pm.mapMapping(src.Mapping[0])
		}

		for _, s := range src.Sample {
			if !isZeroSample(s) {
				pm.mapSample(s)
			}
		}
	}

	for _, s := range p.Sample {
		if isZeroSample(s) {
			// If there are any zero samples, re-merge the profile to GC
			// them.
			return Merge([]*Profile{p})
		}
	}

	return p, nil
}
+
+// Normalize normalizes the source profile by multiplying each value in profile by the
+// ratio of the sum of the base profile's values of that sample type to the sum of the
+// source profile's value of that sample type.
+func (p *Profile) Normalize(pb *Profile) error {
+
+ if err := p.compatible(pb); err != nil {
+ return err
+ }
+
+ baseVals := make([]int64, len(p.SampleType))
+ for _, s := range pb.Sample {
+ for i, v := range s.Value {
+ baseVals[i] += v
+ }
+ }
+
+ srcVals := make([]int64, len(p.SampleType))
+ for _, s := range p.Sample {
+ for i, v := range s.Value {
+ srcVals[i] += v
+ }
+ }
+
+ normScale := make([]float64, len(baseVals))
+ for i := range baseVals {
+ if srcVals[i] == 0 {
+ normScale[i] = 0.0
+ } else {
+ normScale[i] = float64(baseVals[i]) / float64(srcVals[i])
+ }
+ }
+ p.ScaleN(normScale)
+ return nil
+}
+
+func isZeroSample(s *Sample) bool {
+ for _, v := range s.Value {
+ if v != 0 {
+ return false
+ }
+ }
+ return true
+}
+
// profileMerger holds the state used while merging several source
// profiles into the single output profile p.
type profileMerger struct {
	// p is the output profile being accumulated.
	p *Profile

	// Memoization tables within a profile. These are keyed by the
	// current source profile's own IDs and are reset for each source.
	locationsByID locationIDMap
	functionsByID map[uint64]*Function
	mappingsByID  map[uint64]mapInfo

	// Memoization tables for profile entities. These are keyed by the
	// entities' contents and persist across all source profiles.
	samples   map[sampleKey]*Sample
	locations map[locationKey]*Location
	functions map[functionKey]*Function
	mappings  map[mappingKey]*Mapping
}
+
// mapInfo pairs a mapping in the merged profile with the address
// offset to add when translating a source location into it.
type mapInfo struct {
	m      *Mapping
	offset int64
}
+
// mapSample returns the merged-profile sample corresponding to src.
// When an equivalent sample (same locations and labels) already
// exists, src's values are accumulated into it; otherwise a deep copy
// of src is added to the merged profile.
func (pm *profileMerger) mapSample(src *Sample) *Sample {
	// Check memoization table
	k := pm.sampleKey(src)
	if ss, ok := pm.samples[k]; ok {
		// Equivalent sample already present: accumulate values.
		for i, v := range src.Value {
			ss.Value[i] += v
		}
		return ss
	}

	// Make new sample.
	s := &Sample{
		Location: make([]*Location, len(src.Location)),
		Value:    make([]int64, len(src.Value)),
		Label:    make(map[string][]string, len(src.Label)),
		NumLabel: make(map[string][]int64, len(src.NumLabel)),
		NumUnit:  make(map[string][]string, len(src.NumLabel)),
	}
	for i, l := range src.Location {
		s.Location[i] = pm.mapLocation(l)
	}
	// Deep-copy the label maps so the merged profile stays independent
	// of the source profiles.
	for k, v := range src.Label {
		vv := make([]string, len(v))
		copy(vv, v)
		s.Label[k] = vv
	}
	for k, v := range src.NumLabel {
		u := src.NumUnit[k]
		vv := make([]int64, len(v))
		uu := make([]string, len(u))
		copy(vv, v)
		copy(uu, u)
		s.NumLabel[k] = vv
		s.NumUnit[k] = uu
	}
	copy(s.Value, src.Value)
	pm.samples[k] = s
	pm.p.Sample = append(pm.p.Sample, s)
	return s
}
+
// sampleKey computes a content-based key for a sample from its merged
// location IDs and its (sorted) label sets, so that equivalent samples
// collide in the samples memoization table.
func (pm *profileMerger) sampleKey(sample *Sample) sampleKey {
	// Accumulate contents into a string.
	var buf strings.Builder
	buf.Grow(64) // Heuristic to avoid extra allocs

	// encode a number
	putNumber := func(v uint64) {
		var num [binary.MaxVarintLen64]byte
		n := binary.PutUvarint(num[:], v)
		buf.Write(num[:n])
	}

	// encode a string prefixed with its length.
	putDelimitedString := func(s string) {
		putNumber(uint64(len(s)))
		buf.WriteString(s)
	}

	for _, l := range sample.Location {
		// Get the location in the merged profile, which may have a different ID.
		if loc := pm.mapLocation(l); loc != nil {
			putNumber(loc.ID)
		}
	}
	putNumber(0) // Delimiter

	// Label keys are visited in sorted order so the key is
	// deterministic regardless of map iteration order.
	for _, l := range sortedKeys1(sample.Label) {
		putDelimitedString(l)
		values := sample.Label[l]
		putNumber(uint64(len(values)))
		for _, v := range values {
			putDelimitedString(v)
		}
	}

	for _, l := range sortedKeys2(sample.NumLabel) {
		putDelimitedString(l)
		values := sample.NumLabel[l]
		putNumber(uint64(len(values)))
		for _, v := range values {
			putNumber(uint64(v))
		}
		units := sample.NumUnit[l]
		putNumber(uint64(len(units)))
		for _, v := range units {
			putDelimitedString(v)
		}
	}

	return sampleKey(buf.String())
}
+
// sampleKey is an opaque byte string identifying a sample by its
// locations and labels, as built by profileMerger.sampleKey.
type sampleKey string
+
// sortedKeys1 returns the sorted keys found in a string->[]string map.
//
// Note: this is currently non-generic since github pprof runs golint,
// which does not support generics. When that issue is fixed, it can
// be merged with sortedKeys2 and made into a generic function.
func sortedKeys1(m map[string][]string) []string {
	if len(m) == 0 {
		return nil
	}
	keys := make([]string, 0, len(m))
	for key := range m {
		keys = append(keys, key)
	}
	sort.Slice(keys, func(i, j int) bool { return keys[i] < keys[j] })
	return keys
}
+
// sortedKeys2 returns the sorted keys found in a string->[]int64 map.
//
// Note: this is currently non-generic since github pprof runs golint,
// which does not support generics. When that issue is fixed, it can
// be merged with sortedKeys1 and made into a generic function.
func sortedKeys2(m map[string][]int64) []string {
	if len(m) == 0 {
		return nil
	}
	keys := make([]string, 0, len(m))
	for key := range m {
		keys = append(keys, key)
	}
	sort.Slice(keys, func(i, j int) bool { return keys[i] < keys[j] })
	return keys
}
+
// mapLocation returns the merged-profile location corresponding to
// src, creating it on first use. Results are memoized both by the
// source profile's location ID and by the remapped location contents.
func (pm *profileMerger) mapLocation(src *Location) *Location {
	if src == nil {
		return nil
	}

	// Fast path: this exact source location was seen before.
	if l := pm.locationsByID.get(src.ID); l != nil {
		return l
	}

	mi := pm.mapMapping(src.Mapping)
	l := &Location{
		ID:       uint64(len(pm.p.Location) + 1),
		Mapping:  mi.m,
		Address:  uint64(int64(src.Address) + mi.offset),
		Line:     make([]Line, len(src.Line)),
		IsFolded: src.IsFolded,
	}
	for i, ln := range src.Line {
		l.Line[i] = pm.mapLine(ln)
	}
	// Check memoization table. Must be done on the remapped location to
	// account for the remapped mapping ID.
	k := l.key()
	if ll, ok := pm.locations[k]; ok {
		pm.locationsByID.set(src.ID, ll)
		return ll
	}
	pm.locationsByID.set(src.ID, l)
	pm.locations[k] = l
	pm.p.Location = append(pm.p.Location, l)
	return l
}
+
// key generates locationKey to be used as a key for maps.
func (l *Location) key() locationKey {
	key := locationKey{
		addr:     l.Address,
		isFolded: l.IsFolded,
	}
	if l.Mapping != nil {
		// Normalizes address to handle address space randomization.
		key.addr -= l.Mapping.Start
		key.mappingID = l.Mapping.ID
	}
	// Encode the line table as hex "functionID|line" pairs so that
	// locations with identical symbolization compare equal.
	lines := make([]string, len(l.Line)*2)
	for i, line := range l.Line {
		if line.Function != nil {
			lines[i*2] = strconv.FormatUint(line.Function.ID, 16)
		}
		lines[i*2+1] = strconv.FormatInt(line.Line, 16)
	}
	key.lines = strings.Join(lines, "|")
	return key
}
+
// locationKey is a value-comparable identity for a Location, used to
// deduplicate locations during merging. addr is relative to the start
// of the mapping (see Location.key).
type locationKey struct {
	addr, mappingID uint64
	lines           string
	isFolded        bool
}
+
// mapMapping returns the merged-profile mapping corresponding to src
// together with the address offset between the two, creating the
// mapping on first use. Results are memoized both by the source
// profile's mapping ID and by mapping contents.
func (pm *profileMerger) mapMapping(src *Mapping) mapInfo {
	if src == nil {
		return mapInfo{}
	}

	if mi, ok := pm.mappingsByID[src.ID]; ok {
		return mi
	}

	// Check memoization tables.
	mk := src.key()
	if m, ok := pm.mappings[mk]; ok {
		// Same mapping seen at a possibly different base address;
		// record the offset needed to translate addresses into it.
		mi := mapInfo{m, int64(m.Start) - int64(src.Start)}
		pm.mappingsByID[src.ID] = mi
		return mi
	}
	m := &Mapping{
		ID:                     uint64(len(pm.p.Mapping) + 1),
		Start:                  src.Start,
		Limit:                  src.Limit,
		Offset:                 src.Offset,
		File:                   src.File,
		KernelRelocationSymbol: src.KernelRelocationSymbol,
		BuildID:                src.BuildID,
		HasFunctions:           src.HasFunctions,
		HasFilenames:           src.HasFilenames,
		HasLineNumbers:         src.HasLineNumbers,
		HasInlineFrames:        src.HasInlineFrames,
	}
	pm.p.Mapping = append(pm.p.Mapping, m)

	// Update memoization tables.
	pm.mappings[mk] = m
	mi := mapInfo{m, 0}
	pm.mappingsByID[src.ID] = mi
	return mi
}
+
// key generates encoded strings of Mapping to be used as a key for
// maps.
func (m *Mapping) key() mappingKey {
	// Normalize addresses to handle address space randomization.
	// Round up to next 4K boundary to avoid minor discrepancies.
	const mapsizeRounding = 0x1000

	size := m.Limit - m.Start
	size = size + mapsizeRounding - 1
	size = size - (size % mapsizeRounding)
	key := mappingKey{
		size:   size,
		offset: m.Offset,
	}

	switch {
	case m.BuildID != "":
		// Build ID takes precedence over the file name when present.
		key.buildIDOrFile = m.BuildID
	case m.File != "":
		key.buildIDOrFile = m.File
	default:
		// A mapping containing neither build ID nor file name is a fake mapping. A
		// key with empty buildIDOrFile is used for fake mappings so that they are
		// treated as the same mapping during merging.
	}
	return key
}
+
// mappingKey identifies a mapping for deduplication during merging.
type mappingKey struct {
	size, offset  uint64 // size is rounded up to a 4K boundary; see Mapping.key
	buildIDOrFile string // build ID if present, else file name; empty for fake mappings
}
+
+func (pm *profileMerger) mapLine(src Line) Line {
+ ln := Line{
+ Function: pm.mapFunction(src.Function),
+ Line: src.Line,
+ }
+ return ln
+}
+
// mapFunction returns the function in the merged profile corresponding to
// src, creating and memoizing it on first sight.
func (pm *profileMerger) mapFunction(src *Function) *Function {
	if src == nil {
		return nil
	}
	// Fast path: this exact source function was translated before.
	if f, ok := pm.functionsByID[src.ID]; ok {
		return f
	}
	// Look for a semantically identical function seen earlier in the merge.
	k := src.key()
	if f, ok := pm.functions[k]; ok {
		pm.functionsByID[src.ID] = f
		return f
	}
	// No match: clone src with a fresh dense ID and memoize it.
	f := &Function{
		ID:         uint64(len(pm.p.Function) + 1),
		Name:       src.Name,
		SystemName: src.SystemName,
		Filename:   src.Filename,
		StartLine:  src.StartLine,
	}
	pm.functions[k] = f
	pm.functionsByID[src.ID] = f
	pm.p.Function = append(pm.p.Function, f)
	return f
}
+
+// key generates a struct to be used as a key for maps.
+func (f *Function) key() functionKey {
+ return functionKey{
+ f.StartLine,
+ f.Name,
+ f.SystemName,
+ f.Filename,
+ }
+}
+
// functionKey identifies a function for deduplication during merging.
type functionKey struct {
	startLine                  int64
	name, systemName, fileName string
}
+
// combineHeaders checks that all profiles can be merged and returns
// their combined profile.
func combineHeaders(srcs []*Profile) (*Profile, error) {
	// Every profile must be pairwise compatible with the first.
	for _, s := range srcs[1:] {
		if err := srcs[0].compatible(s); err != nil {
			return nil, err
		}
	}

	var timeNanos, durationNanos, period int64
	var comments []string
	seenComments := map[string]bool{}
	var defaultSampleType string
	for _, s := range srcs {
		// Keep the earliest non-zero start time.
		if timeNanos == 0 || s.TimeNanos < timeNanos {
			timeNanos = s.TimeNanos
		}
		// Durations add up; the largest sampling period wins.
		durationNanos += s.DurationNanos
		if period == 0 || period < s.Period {
			period = s.Period
		}
		// Concatenate comments, dropping duplicates but preserving order.
		for _, c := range s.Comments {
			if seen := seenComments[c]; !seen {
				comments = append(comments, c)
				seenComments[c] = true
			}
		}
		// First non-empty default sample type wins.
		if defaultSampleType == "" {
			defaultSampleType = s.DefaultSampleType
		}
	}

	p := &Profile{
		SampleType: make([]*ValueType, len(srcs[0].SampleType)),

		DropFrames: srcs[0].DropFrames,
		KeepFrames: srcs[0].KeepFrames,

		TimeNanos:     timeNanos,
		DurationNanos: durationNanos,
		PeriodType:    srcs[0].PeriodType,
		Period:        period,

		Comments:          comments,
		DefaultSampleType: defaultSampleType,
	}
	copy(p.SampleType, srcs[0].SampleType)
	return p, nil
}
+
// compatible determines if two profiles can be compared/merged.
// returns nil if the profiles are compatible; otherwise an error with
// details on the incompatibility.
func (p *Profile) compatible(pb *Profile) error {
	if !equalValueType(p.PeriodType, pb.PeriodType) {
		return fmt.Errorf("incompatible period types %v and %v", p.PeriodType, pb.PeriodType)
	}

	// Sample types must match pairwise in count, order, type and unit.
	if len(p.SampleType) != len(pb.SampleType) {
		return fmt.Errorf("incompatible sample types %v and %v", p.SampleType, pb.SampleType)
	}

	for i := range p.SampleType {
		if !equalValueType(p.SampleType[i], pb.SampleType[i]) {
			return fmt.Errorf("incompatible sample types %v and %v", p.SampleType, pb.SampleType)
		}
	}
	return nil
}
+
+// equalValueType returns true if the two value types are semantically
+// equal. It ignores the internal fields used during encode/decode.
+func equalValueType(st1, st2 *ValueType) bool {
+ return st1.Type == st2.Type && st1.Unit == st2.Unit
+}
+
// locationIDMap is like a map[uint64]*Location, but provides efficiency for
// ids that are densely numbered, which is often the case.
// Construct with makeLocationIDMap; the zero value's sparse map is nil.
type locationIDMap struct {
	dense  []*Location          // indexed by id for id < len(dense)
	sparse map[uint64]*Location // indexed by id for id >= len(dense)
}
+
+func makeLocationIDMap(n int) locationIDMap {
+ return locationIDMap{
+ dense: make([]*Location, n),
+ sparse: map[uint64]*Location{},
+ }
+}
+
+func (lm locationIDMap) get(id uint64) *Location {
+ if id < uint64(len(lm.dense)) {
+ return lm.dense[int(id)]
+ }
+ return lm.sparse[id]
+}
+
+func (lm locationIDMap) set(id uint64, loc *Location) {
+ if id < uint64(len(lm.dense)) {
+ lm.dense[id] = loc
+ return
+ }
+ lm.sparse[id] = loc
+}
+
+// CompatibilizeSampleTypes makes profiles compatible to be compared/merged. It
+// keeps sample types that appear in all profiles only and drops/reorders the
+// sample types as necessary.
+//
+// In the case of sample types order is not the same for given profiles the
+// order is derived from the first profile.
+//
+// Profiles are modified in-place.
+//
+// It returns an error if the sample type's intersection is empty.
+func CompatibilizeSampleTypes(ps []*Profile) error {
+ sTypes := commonSampleTypes(ps)
+ if len(sTypes) == 0 {
+ return fmt.Errorf("profiles have empty common sample type list")
+ }
+ for _, p := range ps {
+ if err := compatibilizeSampleTypes(p, sTypes); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+// commonSampleTypes returns sample types that appear in all profiles in the
+// order how they ordered in the first profile.
+func commonSampleTypes(ps []*Profile) []string {
+ if len(ps) == 0 {
+ return nil
+ }
+ sTypes := map[string]int{}
+ for _, p := range ps {
+ for _, st := range p.SampleType {
+ sTypes[st.Type]++
+ }
+ }
+ var res []string
+ for _, st := range ps[0].SampleType {
+ if sTypes[st.Type] == len(ps) {
+ res = append(res, st.Type)
+ }
+ }
+ return res
+}
+
// compatibilizeSampleTypes drops sample types that are not present in sTypes
// list and reorder them if needed.
//
// It sets DefaultSampleType to sType[0] if it is not in sType list.
//
// It assumes that all sample types from the sTypes list are present in the
// given profile otherwise it returns an error.
func compatibilizeSampleTypes(p *Profile, sTypes []string) error {
	if len(sTypes) == 0 {
		return fmt.Errorf("sample type list is empty")
	}
	defaultSampleType := sTypes[0]
	// reMap[i] holds the index in p.SampleType of the i-th requested type.
	reMap, needToModify := make([]int, len(sTypes)), false
	for i, st := range sTypes {
		// Keep the profile's own default if it survives the filtering.
		if st == p.DefaultSampleType {
			defaultSampleType = p.DefaultSampleType
		}
		idx := searchValueType(p.SampleType, st)
		if idx < 0 {
			return fmt.Errorf("%q sample type is not found in profile", st)
		}
		reMap[i] = idx
		if idx != i {
			needToModify = true
		}
	}
	// Fast path: the profile already has exactly the requested types in order.
	if !needToModify && len(sTypes) == len(p.SampleType) {
		return nil
	}
	p.DefaultSampleType = defaultSampleType
	oldSampleTypes := p.SampleType
	p.SampleType = make([]*ValueType, len(sTypes))
	for i, idx := range reMap {
		p.SampleType[i] = oldSampleTypes[idx]
	}
	// Rewrite each sample's values in place via a scratch buffer, truncating
	// the existing slice rather than allocating a new one per sample.
	values := make([]int64, len(sTypes))
	for _, s := range p.Sample {
		for i, idx := range reMap {
			values[i] = s.Value[idx]
		}
		s.Value = s.Value[:len(values)]
		copy(s.Value, values)
	}
	return nil
}
+
+func searchValueType(vts []*ValueType, s string) int {
+ for i, vt := range vts {
+ if vt.Type == s {
+ return i
+ }
+ }
+ return -1
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/profile/profile.go b/src/cmd/vendor/github.com/google/pprof/profile/profile.go
new file mode 100644
index 0000000..60ef7e9
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/profile/profile.go
@@ -0,0 +1,856 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package profile provides a representation of profile.proto and
+// methods to encode/decode profiles in this format.
+package profile
+
+import (
+ "bytes"
+ "compress/gzip"
+ "fmt"
+ "io"
+ "math"
+ "path/filepath"
+ "regexp"
+ "sort"
+ "strings"
+ "sync"
+ "time"
+)
+
// Profile is an in-memory representation of profile.proto.
type Profile struct {
	// Core profile data, mirroring the corresponding proto fields.
	SampleType        []*ValueType
	DefaultSampleType string
	Sample            []*Sample
	Mapping           []*Mapping
	Location          []*Location
	Function          []*Function
	Comments          []string

	// Frame-selection expressions carried from the proto (used elsewhere
	// for pruning; see addLegacyFrameInfo callers).
	DropFrames string
	KeepFrames string

	TimeNanos     int64 // profile collection start time, ns since epoch
	DurationNanos int64 // duration of the profile
	PeriodType    *ValueType
	Period        int64

	// The following fields are modified during encoding and copying,
	// so are protected by a Mutex.
	encodeMu sync.Mutex

	// String-table indices and scratch state used during encode/decode.
	commentX           []int64
	dropFramesX        int64
	keepFramesX        int64
	stringTable        []string
	defaultSampleTypeX int64
}
+
// ValueType corresponds to Profile.ValueType
type ValueType struct {
	Type string // cpu, wall, inuse_space, etc
	Unit string // seconds, nanoseconds, bytes, etc

	// String-table indices used during encode/decode.
	typeX int64
	unitX int64
}
+
// Sample corresponds to Profile.Sample
type Sample struct {
	Location []*Location // call stack, one entry per frame
	Value    []int64     // one value per entry in Profile.SampleType
	// Label is a per-label-key map to values for string labels.
	//
	// In general, having multiple values for the given label key is strongly
	// discouraged - see docs for the sample label field in profile.proto. The
	// main reason this unlikely state is tracked here is to make the
	// decoding->encoding roundtrip not lossy. But we expect that the value
	// slices present in this map are always of length 1.
	Label map[string][]string
	// NumLabel is a per-label-key map to values for numeric labels. See a note
	// above on handling multiple values for a label.
	NumLabel map[string][]int64
	// NumUnit is a per-label-key map to the unit names of corresponding numeric
	// label values. The unit info may be missing even if the label is in
	// NumLabel, see the docs in profile.proto for details. When the value is
	// slice is present and not nil, its length must be equal to the length of
	// the corresponding value slice in NumLabel.
	NumUnit map[string][]string

	// Intermediate encode/decode state: location IDs and encoded labels.
	locationIDX []uint64
	labelX      []label
}
+
// label corresponds to Profile.Label
type label struct {
	keyX int64 // string-table index of the label key
	// Exactly one of the two following values must be set
	strX int64
	numX int64 // Integer value for this label
	// can be set if numX has value
	unitX int64
}
+
// Mapping corresponds to Profile.Mapping
type Mapping struct {
	ID              uint64 // unique, non-zero; see CheckValid
	Start           uint64 // first address covered by the mapping
	Limit           uint64 // first address past the mapping
	Offset          uint64 // offset into File
	File            string
	BuildID         string
	HasFunctions    bool
	HasFilenames    bool
	HasLineNumbers  bool
	HasInlineFrames bool

	// String-table indices used during encode/decode.
	fileX    int64
	buildIDX int64

	// Name of the kernel relocation symbol ("_text" or "_stext"), extracted from File.
	// For linux kernel mappings generated by some tools, correct symbolization depends
	// on knowing which of the two possible relocation symbols was used for `Start`.
	// This is given to us as a suffix in `File` (e.g. "[kernel.kallsyms]_stext").
	//
	// Note, this public field is not persisted in the proto. For the purposes of
	// copying / merging / hashing profiles, it is considered subsumed by `File`.
	KernelRelocationSymbol string
}
+
// Location corresponds to Profile.Location
type Location struct {
	ID       uint64 // unique, non-zero; see CheckValid
	Mapping  *Mapping
	Address  uint64
	Line     []Line
	IsFolded bool

	mappingIDX uint64 // mapping ID used during encode/decode
}
+
// Line corresponds to Profile.Line
type Line struct {
	Function *Function
	Line     int64

	functionIDX uint64 // function ID used during encode/decode
}
+
// Function corresponds to Profile.Function
type Function struct {
	ID         uint64 // unique, non-zero; see CheckValid
	Name       string
	SystemName string
	Filename   string
	StartLine  int64

	// String-table indices used during encode/decode.
	nameX       int64
	systemNameX int64
	filenameX   int64
}
+
// Parse parses a profile and checks for its validity. The input
// may be a gzip-compressed encoded protobuf or one of many legacy
// profile formats which may be unsupported in the future.
func Parse(r io.Reader) (*Profile, error) {
	// Slurp the whole input; format detection and the legacy parsers all
	// operate on an in-memory byte slice.
	data, err := io.ReadAll(r)
	if err != nil {
		return nil, err
	}
	return ParseData(data)
}
+
// ParseData parses a profile from a buffer and checks for its
// validity.
func ParseData(data []byte) (*Profile, error) {
	var p *Profile
	var err error
	// Transparently decompress gzip input, detected by its 2-byte magic.
	// Note: gz and the inner err deliberately shadow the outer variables;
	// only data escapes this block.
	if len(data) >= 2 && data[0] == 0x1f && data[1] == 0x8b {
		gz, err := gzip.NewReader(bytes.NewBuffer(data))
		if err == nil {
			data, err = io.ReadAll(gz)
		}
		if err != nil {
			return nil, fmt.Errorf("decompressing profile: %v", err)
		}
	}
	// Try the protobuf format first; fall back to the legacy parsers unless
	// the input was readable protobuf but empty or concatenated.
	if p, err = ParseUncompressed(data); err != nil && err != errNoData && err != errConcatProfile {
		p, err = parseLegacy(data)
	}

	if err != nil {
		return nil, fmt.Errorf("parsing profile: %v", err)
	}

	if err := p.CheckValid(); err != nil {
		return nil, fmt.Errorf("malformed profile: %v", err)
	}
	return p, nil
}
+
// Sentinel errors distinguishing parse outcomes; callers such as ParseData
// and parseLegacy compare against them with ==.
var errUnrecognized = fmt.Errorf("unrecognized profile format")
var errMalformed = fmt.Errorf("malformed profile format")
var errNoData = fmt.Errorf("empty input file")
var errConcatProfile = fmt.Errorf("concatenated profiles detected")
+
+func parseLegacy(data []byte) (*Profile, error) {
+ parsers := []func([]byte) (*Profile, error){
+ parseCPU,
+ parseHeap,
+ parseGoCount, // goroutine, threadcreate
+ parseThread,
+ parseContention,
+ parseJavaProfile,
+ }
+
+ for _, parser := range parsers {
+ p, err := parser(data)
+ if err == nil {
+ p.addLegacyFrameInfo()
+ return p, nil
+ }
+ if err != errUnrecognized {
+ return nil, err
+ }
+ }
+ return nil, errUnrecognized
+}
+
+// ParseUncompressed parses an uncompressed protobuf into a profile.
+func ParseUncompressed(data []byte) (*Profile, error) {
+ if len(data) == 0 {
+ return nil, errNoData
+ }
+ p := &Profile{}
+ if err := unmarshal(data, p); err != nil {
+ return nil, err
+ }
+
+ if err := p.postDecode(); err != nil {
+ return nil, err
+ }
+
+ return p, nil
+}
+
// libRx matches shared-library file names such as "libc.so" or "libc.so.6";
// massageMappings uses it to exclude libraries when guessing the main binary.
var libRx = regexp.MustCompile(`([.]so$|[.]so[._][0-9]+)`)
+
// massageMappings applies heuristic-based changes to the profile
// mappings to account for quirks of some environments.
func (p *Profile) massageMappings() {
	// Merge adjacent regions with matching names, checking that the offsets match
	if len(p.Mapping) > 1 {
		mappings := []*Mapping{p.Mapping[0]}
		for _, m := range p.Mapping[1:] {
			lm := mappings[len(mappings)-1]
			if adjacent(lm, m) {
				// Absorb m into the previous mapping: extend its limit,
				// prefer non-empty metadata, and redirect locations.
				lm.Limit = m.Limit
				if m.File != "" {
					lm.File = m.File
				}
				if m.BuildID != "" {
					lm.BuildID = m.BuildID
				}
				p.updateLocationMapping(m, lm)
				continue
			}
			mappings = append(mappings, m)
		}
		p.Mapping = mappings
	}

	// Use heuristics to identify main binary and move it to the top of the list of mappings
	for i, m := range p.Mapping {
		file := strings.TrimSpace(strings.Replace(m.File, "(deleted)", "", -1))
		if len(file) == 0 {
			// Skip nameless (anonymous) mappings.
			continue
		}
		if len(libRx.FindStringSubmatch(file)) > 0 {
			// Skip shared libraries.
			continue
		}
		if file[0] == '[' {
			// Skip pseudo-files such as "[vdso]" or "[heap]".
			continue
		}
		// Swap what we guess is main to position 0.
		p.Mapping[0], p.Mapping[i] = p.Mapping[i], p.Mapping[0]
		break
	}

	// Keep the mapping IDs neatly sorted
	for i, m := range p.Mapping {
		m.ID = uint64(i + 1)
	}
}
+
+// adjacent returns whether two mapping entries represent the same
+// mapping that has been split into two. Check that their addresses are adjacent,
+// and if the offsets match, if they are available.
+func adjacent(m1, m2 *Mapping) bool {
+ if m1.File != "" && m2.File != "" {
+ if m1.File != m2.File {
+ return false
+ }
+ }
+ if m1.BuildID != "" && m2.BuildID != "" {
+ if m1.BuildID != m2.BuildID {
+ return false
+ }
+ }
+ if m1.Limit != m2.Start {
+ return false
+ }
+ if m1.Offset != 0 && m2.Offset != 0 {
+ offset := m1.Offset + (m1.Limit - m1.Start)
+ if offset != m2.Offset {
+ return false
+ }
+ }
+ return true
+}
+
+func (p *Profile) updateLocationMapping(from, to *Mapping) {
+ for _, l := range p.Location {
+ if l.Mapping == from {
+ l.Mapping = to
+ }
+ }
+}
+
+func serialize(p *Profile) []byte {
+ p.encodeMu.Lock()
+ p.preEncode()
+ b := marshal(p)
+ p.encodeMu.Unlock()
+ return b
+}
+
+// Write writes the profile as a gzip-compressed marshaled protobuf.
+func (p *Profile) Write(w io.Writer) error {
+ zw := gzip.NewWriter(w)
+ defer zw.Close()
+ _, err := zw.Write(serialize(p))
+ return err
+}
+
+// WriteUncompressed writes the profile as a marshaled protobuf.
+func (p *Profile) WriteUncompressed(w io.Writer) error {
+ _, err := w.Write(serialize(p))
+ return err
+}
+
// CheckValid tests whether the profile is valid. Checks include, but are
// not limited to:
//   - len(Profile.Sample[n].value) == len(Profile.value_unit)
//   - Sample.id has a corresponding Profile.Location
func (p *Profile) CheckValid() error {
	// Check that sample values are consistent
	sampleLen := len(p.SampleType)
	if sampleLen == 0 && len(p.Sample) != 0 {
		return fmt.Errorf("missing sample type information")
	}
	for _, s := range p.Sample {
		if s == nil {
			return fmt.Errorf("profile has nil sample")
		}
		if len(s.Value) != sampleLen {
			return fmt.Errorf("mismatch: sample has %d values vs. %d types", len(s.Value), len(p.SampleType))
		}
		for _, l := range s.Location {
			if l == nil {
				return fmt.Errorf("sample has nil location")
			}
		}
	}

	// Check that all mappings/locations/functions are in the tables
	// Check that there are no duplicate ids
	mappings := make(map[uint64]*Mapping, len(p.Mapping))
	for _, m := range p.Mapping {
		if m == nil {
			return fmt.Errorf("profile has nil mapping")
		}
		if m.ID == 0 {
			return fmt.Errorf("found mapping with reserved ID=0")
		}
		if mappings[m.ID] != nil {
			return fmt.Errorf("multiple mappings with same id: %d", m.ID)
		}
		mappings[m.ID] = m
	}
	functions := make(map[uint64]*Function, len(p.Function))
	for _, f := range p.Function {
		if f == nil {
			return fmt.Errorf("profile has nil function")
		}
		if f.ID == 0 {
			return fmt.Errorf("found function with reserved ID=0")
		}
		if functions[f.ID] != nil {
			return fmt.Errorf("multiple functions with same id: %d", f.ID)
		}
		functions[f.ID] = f
	}
	locations := make(map[uint64]*Location, len(p.Location))
	for _, l := range p.Location {
		if l == nil {
			return fmt.Errorf("profile has nil location")
		}
		if l.ID == 0 {
			return fmt.Errorf("found location with reserved id=0")
		}
		if locations[l.ID] != nil {
			return fmt.Errorf("multiple locations with same id: %d", l.ID)
		}
		locations[l.ID] = l
		// A location's mapping and functions must be the exact objects (by
		// pointer identity) registered in the profile's tables above.
		if m := l.Mapping; m != nil {
			if m.ID == 0 || mappings[m.ID] != m {
				return fmt.Errorf("inconsistent mapping %p: %d", m, m.ID)
			}
		}
		for _, ln := range l.Line {
			f := ln.Function
			if f == nil {
				return fmt.Errorf("location id: %d has a line with nil function", l.ID)
			}
			if f.ID == 0 || functions[f.ID] != f {
				return fmt.Errorf("inconsistent function %p: %d", f, f.ID)
			}
		}
	}
	return nil
}
+
// Aggregate merges the locations in the profile into equivalence
// classes preserving the request attributes. It also updates the
// samples to point to the merged locations.
func (p *Profile) Aggregate(inlineFrame, function, filename, linenumber, address bool) error {
	// Narrow each mapping's capability flags to the attributes being kept.
	for _, m := range p.Mapping {
		m.HasInlineFrames = m.HasInlineFrames && inlineFrame
		m.HasFunctions = m.HasFunctions && function
		m.HasFilenames = m.HasFilenames && filename
		m.HasLineNumbers = m.HasLineNumbers && linenumber
	}

	// Aggregate functions
	if !function || !filename {
		for _, f := range p.Function {
			if !function {
				f.Name = ""
				f.SystemName = ""
			}
			if !filename {
				f.Filename = ""
			}
		}
	}

	// Aggregate locations
	if !inlineFrame || !address || !linenumber {
		for _, l := range p.Location {
			// Discarding inline frames keeps only the last line entry.
			if !inlineFrame && len(l.Line) > 1 {
				l.Line = l.Line[len(l.Line)-1:]
			}
			if !linenumber {
				for i := range l.Line {
					l.Line[i].Line = 0
				}
			}
			if !address {
				l.Address = 0
			}
		}
	}

	return p.CheckValid()
}
+
// NumLabelUnits returns a map of numeric label keys to the units
// associated with those keys and a map of those keys to any units
// that were encountered but not used.
// Unit for a given key is the first encountered unit for that key. If multiple
// units are encountered for values paired with a particular key, then the first
// unit encountered is used and all other units are returned in sorted order
// in map of ignored units.
// If no units are encountered for a particular key, the unit is then inferred
// based on the key.
func (p *Profile) NumLabelUnits() (map[string]string, map[string][]string) {
	numLabelUnits := map[string]string{}
	ignoredUnits := map[string]map[string]bool{}
	encounteredKeys := map[string]bool{}

	// Determine units based on numeric tags for each sample.
	for _, s := range p.Sample {
		for k := range s.NumLabel {
			encounteredKeys[k] = true
			for _, unit := range s.NumUnit[k] {
				if unit == "" {
					continue
				}
				// First unit seen for a key wins; later, conflicting units
				// are collected as ignored.
				if wantUnit, ok := numLabelUnits[k]; !ok {
					numLabelUnits[k] = unit
				} else if wantUnit != unit {
					if v, ok := ignoredUnits[k]; ok {
						v[unit] = true
					} else {
						ignoredUnits[k] = map[string]bool{unit: true}
					}
				}
			}
		}
	}
	// Infer units for keys without any units associated with
	// numeric tag values.
	for key := range encounteredKeys {
		unit := numLabelUnits[key]
		if unit == "" {
			switch key {
			case "alignment", "request":
				numLabelUnits[key] = "bytes"
			default:
				// Fall back to the key itself as the unit name.
				numLabelUnits[key] = key
			}
		}
	}

	// Copy ignored units into more readable format
	unitsIgnored := make(map[string][]string, len(ignoredUnits))
	for key, values := range ignoredUnits {
		units := make([]string, len(values))
		i := 0
		for unit := range values {
			units[i] = unit
			i++
		}
		sort.Strings(units)
		unitsIgnored[key] = units
	}

	return numLabelUnits, unitsIgnored
}
+
// String dumps a text representation of a profile. Intended mainly
// for debugging purposes.
func (p *Profile) String() string {
	ss := make([]string, 0, len(p.Comments)+len(p.Sample)+len(p.Mapping)+len(p.Location))
	for _, c := range p.Comments {
		ss = append(ss, "Comment: "+c)
	}
	if pt := p.PeriodType; pt != nil {
		ss = append(ss, fmt.Sprintf("PeriodType: %s %s", pt.Type, pt.Unit))
	}
	ss = append(ss, fmt.Sprintf("Period: %d", p.Period))
	if p.TimeNanos != 0 {
		ss = append(ss, fmt.Sprintf("Time: %v", time.Unix(0, p.TimeNanos)))
	}
	if p.DurationNanos != 0 {
		ss = append(ss, fmt.Sprintf("Duration: %.4v", time.Duration(p.DurationNanos)))
	}

	// Header line listing each sample type, flagging the default one.
	ss = append(ss, "Samples:")
	var sh1 string
	for _, s := range p.SampleType {
		dflt := ""
		if s.Type == p.DefaultSampleType {
			dflt = "[dflt]"
		}
		sh1 = sh1 + fmt.Sprintf("%s/%s%s ", s.Type, s.Unit, dflt)
	}
	ss = append(ss, strings.TrimSpace(sh1))
	for _, s := range p.Sample {
		ss = append(ss, s.string())
	}

	ss = append(ss, "Locations")
	for _, l := range p.Location {
		ss = append(ss, l.string())
	}

	ss = append(ss, "Mappings")
	for _, m := range p.Mapping {
		ss = append(ss, m.string())
	}

	return strings.Join(ss, "\n") + "\n"
}
+
+// string dumps a text representation of a mapping. Intended mainly
+// for debugging purposes.
+func (m *Mapping) string() string {
+ bits := ""
+ if m.HasFunctions {
+ bits = bits + "[FN]"
+ }
+ if m.HasFilenames {
+ bits = bits + "[FL]"
+ }
+ if m.HasLineNumbers {
+ bits = bits + "[LN]"
+ }
+ if m.HasInlineFrames {
+ bits = bits + "[IN]"
+ }
+ return fmt.Sprintf("%d: %#x/%#x/%#x %s %s %s",
+ m.ID,
+ m.Start, m.Limit, m.Offset,
+ m.File,
+ m.BuildID,
+ bits)
+}
+
// string dumps a text representation of a location. Intended mainly
// for debugging purposes.
func (l *Location) string() string {
	ss := []string{}
	locStr := fmt.Sprintf("%6d: %#x ", l.ID, l.Address)
	if m := l.Mapping; m != nil {
		locStr = locStr + fmt.Sprintf("M=%d ", m.ID)
	}
	if l.IsFolded {
		locStr = locStr + "[F] "
	}
	// With no line info, emit just the location header.
	if len(l.Line) == 0 {
		ss = append(ss, locStr)
	}
	for li := range l.Line {
		lnStr := "??"
		if fn := l.Line[li].Function; fn != nil {
			lnStr = fmt.Sprintf("%s %s:%d s=%d",
				fn.Name,
				fn.Filename,
				l.Line[li].Line,
				fn.StartLine)
			// Append the system (mangled) name only when it differs.
			if fn.Name != fn.SystemName {
				lnStr = lnStr + "(" + fn.SystemName + ")"
			}
		}
		ss = append(ss, locStr+lnStr)
		// Do not print location details past the first line
		locStr = " "
	}
	return strings.Join(ss, "\n")
}
+
// string dumps a text representation of a sample. Intended mainly
// for debugging purposes.
func (s *Sample) string() string {
	ss := []string{}
	var sv string
	// Values first (right-aligned), then the location IDs of the stack.
	for _, v := range s.Value {
		sv = fmt.Sprintf("%s %10d", sv, v)
	}
	sv = sv + ": "
	for _, l := range s.Location {
		sv = sv + fmt.Sprintf("%d ", l.ID)
	}
	ss = append(ss, sv)
	const labelHeader = " "
	// String and numeric labels each go on their own indented line.
	if len(s.Label) > 0 {
		ss = append(ss, labelHeader+labelsToString(s.Label))
	}
	if len(s.NumLabel) > 0 {
		ss = append(ss, labelHeader+numLabelsToString(s.NumLabel, s.NumUnit))
	}
	return strings.Join(ss, "\n")
}
+
// labelsToString returns a deterministic string representation of a string
// label map: space-separated "key:[values]" entries, sorted by their
// rendered form (and hence by key).
func labelsToString(labels map[string][]string) string {
	// Pre-size: exactly one rendered entry per key.
	ls := make([]string, 0, len(labels))
	for k, v := range labels {
		ls = append(ls, fmt.Sprintf("%s:%v", k, v))
	}
	// Map iteration order is random; sort for stable output.
	sort.Strings(ls)
	return strings.Join(ls, " ")
}
+
// numLabelsToString returns a deterministic string representation of a
// numeric label map: space-separated "key:[values]" entries sorted by their
// rendered form. When a key has exactly one unit per value, each value is
// rendered as "value unit"; otherwise the bare numbers are printed.
func numLabelsToString(numLabels map[string][]int64, numUnits map[string][]string) string {
	// Pre-size: exactly one rendered entry per key.
	ls := make([]string, 0, len(numLabels))
	for k, v := range numLabels {
		units := numUnits[k]
		var labelString string
		if len(units) == len(v) {
			values := make([]string, len(v))
			for i, vv := range v {
				values[i] = fmt.Sprintf("%d %s", vv, units[i])
			}
			labelString = fmt.Sprintf("%s:%v", k, values)
		} else {
			// Missing or mismatched unit info: print the raw numbers.
			labelString = fmt.Sprintf("%s:%v", k, v)
		}
		ls = append(ls, labelString)
	}
	// Map iteration order is random; sort for stable output.
	sort.Strings(ls)
	return strings.Join(ls, " ")
}
+
+// SetLabel sets the specified key to the specified value for all samples in the
+// profile.
+func (p *Profile) SetLabel(key string, value []string) {
+ for _, sample := range p.Sample {
+ if sample.Label == nil {
+ sample.Label = map[string][]string{key: value}
+ } else {
+ sample.Label[key] = value
+ }
+ }
+}
+
+// RemoveLabel removes all labels associated with the specified key for all
+// samples in the profile.
+func (p *Profile) RemoveLabel(key string) {
+ for _, sample := range p.Sample {
+ delete(sample.Label, key)
+ }
+}
+
+// HasLabel returns true if a sample has a label with indicated key and value.
+func (s *Sample) HasLabel(key, value string) bool {
+ for _, v := range s.Label[key] {
+ if v == value {
+ return true
+ }
+ }
+ return false
+}
+
+// SetNumLabel sets the specified key to the specified value for all samples in the
+// profile. "unit" is a slice that describes the units that each corresponding member
+// of "values" is measured in (e.g. bytes or seconds). If there is no relevant
+// unit for a given value, that member of "unit" should be the empty string.
+// "unit" must either have the same length as "value", or be nil.
+func (p *Profile) SetNumLabel(key string, value []int64, unit []string) {
+ for _, sample := range p.Sample {
+ if sample.NumLabel == nil {
+ sample.NumLabel = map[string][]int64{key: value}
+ } else {
+ sample.NumLabel[key] = value
+ }
+ if sample.NumUnit == nil {
+ sample.NumUnit = map[string][]string{key: unit}
+ } else {
+ sample.NumUnit[key] = unit
+ }
+ }
+}
+
+// RemoveNumLabel removes all numerical labels associated with the specified key for all
+// samples in the profile.
+func (p *Profile) RemoveNumLabel(key string) {
+ for _, sample := range p.Sample {
+ delete(sample.NumLabel, key)
+ delete(sample.NumUnit, key)
+ }
+}
+
+// DiffBaseSample returns true if a sample belongs to the diff base and false
+// otherwise.
+func (s *Sample) DiffBaseSample() bool {
+ return s.HasLabel("pprof::base", "true")
+}
+
+// Scale multiplies all sample values in a profile by a constant and keeps
+// only samples that have at least one non-zero value.
+func (p *Profile) Scale(ratio float64) {
+ if ratio == 1 {
+ return
+ }
+ ratios := make([]float64, len(p.SampleType))
+ for i := range p.SampleType {
+ ratios[i] = ratio
+ }
+ p.ScaleN(ratios)
+}
+
+// ScaleN multiplies each sample values in a sample by a different amount
+// and keeps only samples that have at least one non-zero value.
+func (p *Profile) ScaleN(ratios []float64) error {
+ if len(p.SampleType) != len(ratios) {
+ return fmt.Errorf("mismatched scale ratios, got %d, want %d", len(ratios), len(p.SampleType))
+ }
+ allOnes := true
+ for _, r := range ratios {
+ if r != 1 {
+ allOnes = false
+ break
+ }
+ }
+ if allOnes {
+ return nil
+ }
+ fillIdx := 0
+ for _, s := range p.Sample {
+ keepSample := false
+ for i, v := range s.Value {
+ if ratios[i] != 1 {
+ val := int64(math.Round(float64(v) * ratios[i]))
+ s.Value[i] = val
+ keepSample = keepSample || val != 0
+ }
+ }
+ if keepSample {
+ p.Sample[fillIdx] = s
+ fillIdx++
+ }
+ }
+ p.Sample = p.Sample[:fillIdx]
+ return nil
+}
+
+// HasFunctions determines if all locations in this profile have
+// symbolized function information.
+func (p *Profile) HasFunctions() bool {
+ for _, l := range p.Location {
+ if l.Mapping != nil && !l.Mapping.HasFunctions {
+ return false
+ }
+ }
+ return true
+}
+
+// HasFileLines determines if all locations in this profile have
+// symbolized file and line number information.
+func (p *Profile) HasFileLines() bool {
+ for _, l := range p.Location {
+ if l.Mapping != nil && (!l.Mapping.HasFilenames || !l.Mapping.HasLineNumbers) {
+ return false
+ }
+ }
+ return true
+}
+
+// Unsymbolizable returns true if a mapping points to a binary for which
+// locations can't be symbolized in principle, at least now. Examples are
+// "[vdso]", [vsyscall]" and some others, see the code.
+func (m *Mapping) Unsymbolizable() bool {
+ name := filepath.Base(m.File)
+ return strings.HasPrefix(name, "[") || strings.HasPrefix(name, "linux-vdso") || strings.HasPrefix(m.File, "/dev/dri/")
+}
+
// Copy makes a fully independent copy of a profile.
func (p *Profile) Copy() *Profile {
	pp := &Profile{}
	// Deep-copy by round-tripping through the wire encoding. A failure here
	// indicates an internally inconsistent profile, so panic rather than
	// return an error.
	if err := unmarshal(serialize(p), pp); err != nil {
		panic(err)
	}
	if err := pp.postDecode(); err != nil {
		panic(err)
	}

	return pp
}
diff --git a/src/cmd/vendor/github.com/google/pprof/profile/proto.go b/src/cmd/vendor/github.com/google/pprof/profile/proto.go
new file mode 100644
index 0000000..a15696b
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/profile/proto.go
@@ -0,0 +1,367 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// This file is a simple protocol buffer encoder and decoder.
+// The format is described at
+// https://developers.google.com/protocol-buffers/docs/encoding
+//
+// A protocol message must implement the message interface:
+// decoder() []decoder
+// encode(*buffer)
+//
+// The decoder method returns a slice indexed by field number that gives the
+// function to decode that field.
+// The encode method encodes its receiver into the given buffer.
+//
+// The two methods are simple enough to be implemented by hand rather than
+// by using a protocol compiler.
+//
+// See profile.go for examples of messages implementing this interface.
+//
+// There is no support for groups, message sets, or "has" bits.
+
+package profile
+
+import (
+ "errors"
+ "fmt"
+)
+
+type buffer struct {
+ field int // field tag
+ typ int // proto wire type code for field
+ u64 uint64
+ data []byte
+ tmp [16]byte
+ tmpLines []Line // temporary storage used while decoding "repeated Line".
+}
+
+type decoder func(*buffer, message) error
+
+type message interface {
+ decoder() []decoder
+ encode(*buffer)
+}
+
+func marshal(m message) []byte {
+ var b buffer
+ m.encode(&b)
+ return b.data
+}
+
+func encodeVarint(b *buffer, x uint64) {
+ for x >= 128 {
+ b.data = append(b.data, byte(x)|0x80)
+ x >>= 7
+ }
+ b.data = append(b.data, byte(x))
+}
+
+func encodeLength(b *buffer, tag int, len int) {
+ encodeVarint(b, uint64(tag)<<3|2)
+ encodeVarint(b, uint64(len))
+}
+
+func encodeUint64(b *buffer, tag int, x uint64) {
+ // append varint to b.data
+ encodeVarint(b, uint64(tag)<<3)
+ encodeVarint(b, x)
+}
+
+func encodeUint64s(b *buffer, tag int, x []uint64) {
+ if len(x) > 2 {
+ // Use packed encoding
+ n1 := len(b.data)
+ for _, u := range x {
+ encodeVarint(b, u)
+ }
+ n2 := len(b.data)
+ encodeLength(b, tag, n2-n1)
+ n3 := len(b.data)
+ copy(b.tmp[:], b.data[n2:n3])
+ copy(b.data[n1+(n3-n2):], b.data[n1:n2])
+ copy(b.data[n1:], b.tmp[:n3-n2])
+ return
+ }
+ for _, u := range x {
+ encodeUint64(b, tag, u)
+ }
+}
+
+func encodeUint64Opt(b *buffer, tag int, x uint64) {
+ if x == 0 {
+ return
+ }
+ encodeUint64(b, tag, x)
+}
+
+func encodeInt64(b *buffer, tag int, x int64) {
+ u := uint64(x)
+ encodeUint64(b, tag, u)
+}
+
+func encodeInt64s(b *buffer, tag int, x []int64) {
+ if len(x) > 2 {
+ // Use packed encoding
+ n1 := len(b.data)
+ for _, u := range x {
+ encodeVarint(b, uint64(u))
+ }
+ n2 := len(b.data)
+ encodeLength(b, tag, n2-n1)
+ n3 := len(b.data)
+ copy(b.tmp[:], b.data[n2:n3])
+ copy(b.data[n1+(n3-n2):], b.data[n1:n2])
+ copy(b.data[n1:], b.tmp[:n3-n2])
+ return
+ }
+ for _, u := range x {
+ encodeInt64(b, tag, u)
+ }
+}
+
+func encodeInt64Opt(b *buffer, tag int, x int64) {
+ if x == 0 {
+ return
+ }
+ encodeInt64(b, tag, x)
+}
+
+func encodeString(b *buffer, tag int, x string) {
+ encodeLength(b, tag, len(x))
+ b.data = append(b.data, x...)
+}
+
+func encodeStrings(b *buffer, tag int, x []string) {
+ for _, s := range x {
+ encodeString(b, tag, s)
+ }
+}
+
+func encodeBool(b *buffer, tag int, x bool) {
+ if x {
+ encodeUint64(b, tag, 1)
+ } else {
+ encodeUint64(b, tag, 0)
+ }
+}
+
+func encodeBoolOpt(b *buffer, tag int, x bool) {
+ if x {
+ encodeBool(b, tag, x)
+ }
+}
+
+func encodeMessage(b *buffer, tag int, m message) {
+ n1 := len(b.data)
+ m.encode(b)
+ n2 := len(b.data)
+ encodeLength(b, tag, n2-n1)
+ n3 := len(b.data)
+ copy(b.tmp[:], b.data[n2:n3])
+ copy(b.data[n1+(n3-n2):], b.data[n1:n2])
+ copy(b.data[n1:], b.tmp[:n3-n2])
+}
+
+func unmarshal(data []byte, m message) (err error) {
+ b := buffer{data: data, typ: 2}
+ return decodeMessage(&b, m)
+}
+
+func le64(p []byte) uint64 {
+ return uint64(p[0]) | uint64(p[1])<<8 | uint64(p[2])<<16 | uint64(p[3])<<24 | uint64(p[4])<<32 | uint64(p[5])<<40 | uint64(p[6])<<48 | uint64(p[7])<<56
+}
+
+func le32(p []byte) uint32 {
+ return uint32(p[0]) | uint32(p[1])<<8 | uint32(p[2])<<16 | uint32(p[3])<<24
+}
+
+func decodeVarint(data []byte) (uint64, []byte, error) {
+ var u uint64
+ for i := 0; ; i++ {
+ if i >= 10 || i >= len(data) {
+ return 0, nil, errors.New("bad varint")
+ }
+ u |= uint64(data[i]&0x7F) << uint(7*i)
+ if data[i]&0x80 == 0 {
+ return u, data[i+1:], nil
+ }
+ }
+}
+
+func decodeField(b *buffer, data []byte) ([]byte, error) {
+ x, data, err := decodeVarint(data)
+ if err != nil {
+ return nil, err
+ }
+ b.field = int(x >> 3)
+ b.typ = int(x & 7)
+ b.data = nil
+ b.u64 = 0
+ switch b.typ {
+ case 0:
+ b.u64, data, err = decodeVarint(data)
+ if err != nil {
+ return nil, err
+ }
+ case 1:
+ if len(data) < 8 {
+ return nil, errors.New("not enough data")
+ }
+ b.u64 = le64(data[:8])
+ data = data[8:]
+ case 2:
+ var n uint64
+ n, data, err = decodeVarint(data)
+ if err != nil {
+ return nil, err
+ }
+ if n > uint64(len(data)) {
+ return nil, errors.New("too much data")
+ }
+ b.data = data[:n]
+ data = data[n:]
+ case 5:
+ if len(data) < 4 {
+ return nil, errors.New("not enough data")
+ }
+ b.u64 = uint64(le32(data[:4]))
+ data = data[4:]
+ default:
+ return nil, fmt.Errorf("unknown wire type: %d", b.typ)
+ }
+
+ return data, nil
+}
+
+func checkType(b *buffer, typ int) error {
+ if b.typ != typ {
+ return errors.New("type mismatch")
+ }
+ return nil
+}
+
+func decodeMessage(b *buffer, m message) error {
+ if err := checkType(b, 2); err != nil {
+ return err
+ }
+ dec := m.decoder()
+ data := b.data
+ for len(data) > 0 {
+ // pull varint field# + type
+ var err error
+ data, err = decodeField(b, data)
+ if err != nil {
+ return err
+ }
+ if b.field >= len(dec) || dec[b.field] == nil {
+ continue
+ }
+ if err := dec[b.field](b, m); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+func decodeInt64(b *buffer, x *int64) error {
+ if err := checkType(b, 0); err != nil {
+ return err
+ }
+ *x = int64(b.u64)
+ return nil
+}
+
+func decodeInt64s(b *buffer, x *[]int64) error {
+ if b.typ == 2 {
+ // Packed encoding
+ data := b.data
+ for len(data) > 0 {
+ var u uint64
+ var err error
+
+ if u, data, err = decodeVarint(data); err != nil {
+ return err
+ }
+ *x = append(*x, int64(u))
+ }
+ return nil
+ }
+ var i int64
+ if err := decodeInt64(b, &i); err != nil {
+ return err
+ }
+ *x = append(*x, i)
+ return nil
+}
+
+func decodeUint64(b *buffer, x *uint64) error {
+ if err := checkType(b, 0); err != nil {
+ return err
+ }
+ *x = b.u64
+ return nil
+}
+
+func decodeUint64s(b *buffer, x *[]uint64) error {
+ if b.typ == 2 {
+ data := b.data
+ // Packed encoding
+ for len(data) > 0 {
+ var u uint64
+ var err error
+
+ if u, data, err = decodeVarint(data); err != nil {
+ return err
+ }
+ *x = append(*x, u)
+ }
+ return nil
+ }
+ var u uint64
+ if err := decodeUint64(b, &u); err != nil {
+ return err
+ }
+ *x = append(*x, u)
+ return nil
+}
+
+func decodeString(b *buffer, x *string) error {
+ if err := checkType(b, 2); err != nil {
+ return err
+ }
+ *x = string(b.data)
+ return nil
+}
+
+func decodeStrings(b *buffer, x *[]string) error {
+ var s string
+ if err := decodeString(b, &s); err != nil {
+ return err
+ }
+ *x = append(*x, s)
+ return nil
+}
+
+func decodeBool(b *buffer, x *bool) error {
+ if err := checkType(b, 0); err != nil {
+ return err
+ }
+ if int64(b.u64) == 0 {
+ *x = false
+ } else {
+ *x = true
+ }
+ return nil
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/profile/prune.go b/src/cmd/vendor/github.com/google/pprof/profile/prune.go
new file mode 100644
index 0000000..b2f9fd5
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/profile/prune.go
@@ -0,0 +1,194 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Implements methods to remove frames from profiles.
+
+package profile
+
+import (
+ "fmt"
+ "regexp"
+ "strings"
+)
+
+var (
+ reservedNames = []string{"(anonymous namespace)", "operator()"}
+ bracketRx = func() *regexp.Regexp {
+ var quotedNames []string
+ for _, name := range append(reservedNames, "(") {
+ quotedNames = append(quotedNames, regexp.QuoteMeta(name))
+ }
+ return regexp.MustCompile(strings.Join(quotedNames, "|"))
+ }()
+)
+
+// simplifyFunc does some primitive simplification of function names.
+func simplifyFunc(f string) string {
+ // Account for leading '.' on the PPC ELF v1 ABI.
+ funcName := strings.TrimPrefix(f, ".")
+ // Account for unsimplified names -- try to remove the argument list by trimming
+ // starting from the first '(', but skipping reserved names that have '('.
+ for _, ind := range bracketRx.FindAllStringSubmatchIndex(funcName, -1) {
+ foundReserved := false
+ for _, res := range reservedNames {
+ if funcName[ind[0]:ind[1]] == res {
+ foundReserved = true
+ break
+ }
+ }
+ if !foundReserved {
+ funcName = funcName[:ind[0]]
+ break
+ }
+ }
+ return funcName
+}
+
+// Prune removes all nodes beneath a node matching dropRx, and not
+// matching keepRx. If the root node of a Sample matches, the sample
+// will have an empty stack.
+func (p *Profile) Prune(dropRx, keepRx *regexp.Regexp) {
+ prune := make(map[uint64]bool)
+ pruneBeneath := make(map[uint64]bool)
+
+ // simplifyFunc can be expensive, so cache results.
+ // Note that the same function name can be encountered many times due
+	// to different lines and addresses in the same function.
+ pruneCache := map[string]bool{} // Map from function to whether or not to prune
+ pruneFromHere := func(s string) bool {
+ if r, ok := pruneCache[s]; ok {
+ return r
+ }
+ funcName := simplifyFunc(s)
+ if dropRx.MatchString(funcName) {
+ if keepRx == nil || !keepRx.MatchString(funcName) {
+ pruneCache[s] = true
+ return true
+ }
+ }
+ pruneCache[s] = false
+ return false
+ }
+
+ for _, loc := range p.Location {
+ var i int
+ for i = len(loc.Line) - 1; i >= 0; i-- {
+ if fn := loc.Line[i].Function; fn != nil && fn.Name != "" {
+ if pruneFromHere(fn.Name) {
+ break
+ }
+ }
+ }
+
+ if i >= 0 {
+ // Found matching entry to prune.
+ pruneBeneath[loc.ID] = true
+
+ // Remove the matching location.
+ if i == len(loc.Line)-1 {
+ // Matched the top entry: prune the whole location.
+ prune[loc.ID] = true
+ } else {
+ loc.Line = loc.Line[i+1:]
+ }
+ }
+ }
+
+ // Prune locs from each Sample
+ for _, sample := range p.Sample {
+ // Scan from the root to the leaves to find the prune location.
+ // Do not prune frames before the first user frame, to avoid
+ // pruning everything.
+ foundUser := false
+ for i := len(sample.Location) - 1; i >= 0; i-- {
+ id := sample.Location[i].ID
+ if !prune[id] && !pruneBeneath[id] {
+ foundUser = true
+ continue
+ }
+ if !foundUser {
+ continue
+ }
+ if prune[id] {
+ sample.Location = sample.Location[i+1:]
+ break
+ }
+ if pruneBeneath[id] {
+ sample.Location = sample.Location[i:]
+ break
+ }
+ }
+ }
+}
+
+// RemoveUninteresting prunes and elides profiles using built-in
+// tables of uninteresting function names.
+func (p *Profile) RemoveUninteresting() error {
+ var keep, drop *regexp.Regexp
+ var err error
+
+ if p.DropFrames != "" {
+ if drop, err = regexp.Compile("^(" + p.DropFrames + ")$"); err != nil {
+ return fmt.Errorf("failed to compile regexp %s: %v", p.DropFrames, err)
+ }
+ if p.KeepFrames != "" {
+ if keep, err = regexp.Compile("^(" + p.KeepFrames + ")$"); err != nil {
+ return fmt.Errorf("failed to compile regexp %s: %v", p.KeepFrames, err)
+ }
+ }
+ p.Prune(drop, keep)
+ }
+ return nil
+}
+
+// PruneFrom removes all nodes beneath the lowest node matching dropRx, not including itself.
+//
+// Please see the example below to understand this method as well as
+// the difference from Prune method.
+//
+// A sample contains Location of [A,B,C,B,D] where D is the top frame and there's no inline.
+//
+// PruneFrom(A) returns [A,B,C,B,D] because there's no node beneath A.
+// Prune(A, nil) returns [B,C,B,D] by removing A itself.
+//
+// PruneFrom(B) returns [B,C,B,D] by removing all nodes beneath the first B when scanning from the bottom.
+// Prune(B, nil) returns [D] because a matching node is found by scanning from the root.
+func (p *Profile) PruneFrom(dropRx *regexp.Regexp) {
+ pruneBeneath := make(map[uint64]bool)
+
+ for _, loc := range p.Location {
+ for i := 0; i < len(loc.Line); i++ {
+ if fn := loc.Line[i].Function; fn != nil && fn.Name != "" {
+ funcName := simplifyFunc(fn.Name)
+ if dropRx.MatchString(funcName) {
+ // Found matching entry to prune.
+ pruneBeneath[loc.ID] = true
+ loc.Line = loc.Line[i:]
+ break
+ }
+ }
+ }
+ }
+
+ // Prune locs from each Sample
+ for _, sample := range p.Sample {
+ // Scan from the bottom leaf to the root to find the prune location.
+ for i, loc := range sample.Location {
+ if pruneBeneath[loc.ID] {
+ sample.Location = sample.Location[i:]
+ break
+ }
+ }
+ }
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/third_party/d3flamegraph/D3_FLAME_GRAPH_LICENSE b/src/cmd/vendor/github.com/google/pprof/third_party/d3flamegraph/D3_FLAME_GRAPH_LICENSE
new file mode 100644
index 0000000..8dada3e
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/third_party/d3flamegraph/D3_FLAME_GRAPH_LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "{}"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright {yyyy} {name of copyright owner}
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/src/cmd/vendor/github.com/google/pprof/third_party/d3flamegraph/D3_LICENSE b/src/cmd/vendor/github.com/google/pprof/third_party/d3flamegraph/D3_LICENSE
new file mode 100644
index 0000000..b014515
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/third_party/d3flamegraph/D3_LICENSE
@@ -0,0 +1,13 @@
+Copyright 2010-2021 Mike Bostock
+
+Permission to use, copy, modify, and/or distribute this software for any purpose
+with or without fee is hereby granted, provided that the above copyright notice
+and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
+REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
+OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
+TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF
+THIS SOFTWARE.
diff --git a/src/cmd/vendor/github.com/google/pprof/third_party/d3flamegraph/README.md b/src/cmd/vendor/github.com/google/pprof/third_party/d3flamegraph/README.md
new file mode 100644
index 0000000..eb84b68
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/third_party/d3flamegraph/README.md
@@ -0,0 +1,33 @@
+# Building a customized D3.js bundle
+
+The D3.js version distributed with pprof is customized to only include the
+modules required by pprof.
+
+## Dependencies
+
+- Install [npm](https://www.npmjs.com).
+
+## Building
+
+- Run `update.sh` to:
+ - Download npm package dependencies (declared in `package.json` and `package-lock.json`)
+  - Create a d3.js bundle containing the JavaScript of d3 and d3-flame-graph (by running `webpack`)
+
+This will generate `d3_flame_graph.go`, the minified custom D3.js bundle as Go source code.
+
+# References / Appendix
+
+## D3 Custom Bundle
+
+A demonstration of building a custom D3 4.0 bundle using ES2015 modules and Rollup.
+
+[bl.ocks.org/mbostock/bb09af4c39c79cffcde4](https://bl.ocks.org/mbostock/bb09af4c39c79cffcde4)
+
+## Old version of d3-pprof
+
+A previous version of d3-flame-graph bundled for pprof used Rollup instead of
+Webpack. This has now been migrated directly into this directory.
+
+The repository configuring Rollup was here:
+
+[github.com/spiermar/d3-pprof](https://github.com/spiermar/d3-pprof)
diff --git a/src/cmd/vendor/github.com/google/pprof/third_party/d3flamegraph/d3_flame_graph.go b/src/cmd/vendor/github.com/google/pprof/third_party/d3flamegraph/d3_flame_graph.go
new file mode 100644
index 0000000..7e27941
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/third_party/d3flamegraph/d3_flame_graph.go
@@ -0,0 +1,65 @@
+// D3.js is a JavaScript library for manipulating documents based on data.
+// https://github.com/d3/d3
+// See D3_LICENSE file for license details
+
+// d3-flame-graph is a D3.js plugin that produces flame graphs from hierarchical data.
+// https://github.com/spiermar/d3-flame-graph
+// See D3_FLAME_GRAPH_LICENSE file for license details
+
+package d3flamegraph
+
+// JSSource returns the d3 and d3-flame-graph JavaScript bundle
+const JSSource = `
+
+!function(t,n){if("object"==typeof exports&&"object"==typeof module)module.exports=n();else if("function"==typeof define&&define.amd)define([],n);else{var e=n();for(var r in e)("object"==typeof exports?exports:t)[r]=e[r]}}(self,(function(){return(()=>{"use strict";var t={d:(n,e)=>{for(var r in e)t.o(e,r)&&!t.o(n,r)&&Object.defineProperty(n,r,{enumerable:!0,get:e[r]})},o:(t,n)=>Object.prototype.hasOwnProperty.call(t,n),r:t=>{"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(t,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(t,"__esModule",{value:!0})}},n={};function e(){}function r(t){return null==t?e:function(){return this.querySelector(t)}}function i(t){return null==t?[]:Array.isArray(t)?t:Array.from(t)}function o(){return[]}function u(t){return function(n){return n.matches(t)}}t.r(n),t.d(n,{flamegraph:()=>ji,select:()=>pt});var a=Array.prototype.find;function l(){return this.firstElementChild}var s=Array.prototype.filter;function c(){return Array.from(this.children)}function f(t){return new Array(t.length)}function h(t,n){this.ownerDocument=t.ownerDocument,this.namespaceURI=t.namespaceURI,this._next=null,this._parent=t,this.__data__=n}function p(t){return function(){return t}}function d(t,n,e,r,i,o){for(var u,a=0,l=n.length,s=o.length;a<s;++a)(u=n[a])?(u.__data__=o[a],r[a]=u):e[a]=new h(t,o[a]);for(;a<l;++a)(u=n[a])&&(i[a]=u)}function g(t,n,e,r,i,o,u){var a,l,s,c=new Map,f=n.length,p=o.length,d=new Array(f);for(a=0;a<f;++a)(l=n[a])&&(d[a]=s=u.call(l,l.__data__,a,n)+"",c.has(s)?i[a]=l:c.set(s,l));for(a=0;a<p;++a)s=u.call(t,o[a],a,o)+"",(l=c.get(s))?(r[a]=l,l.__data__=o[a],c.delete(s)):e[a]=new h(t,o[a]);for(a=0;a<f;++a)(l=n[a])&&c.get(d[a])===l&&(i[a]=l)}function v(t){return t.__data__}function y(t){return"object"==typeof t&&"length"in t?t:Array.from(t)}function m(t,n){return t<n?-1:t>n?1:t>=n?0:NaN}h.prototype={constructor:h,appendChild:function(t){return this._parent.insertBefore(t,this._next)},insertBefore:function(t,n){return 
this._parent.insertBefore(t,n)},querySelector:function(t){return this._parent.querySelector(t)},querySelectorAll:function(t){return this._parent.querySelectorAll(t)}};var _="http://www.w3.org/1999/xhtml";const w={svg:"http://www.w3.org/2000/svg",xhtml:_,xlink:"http://www.w3.org/1999/xlink",xml:"http://www.w3.org/XML/1998/namespace",xmlns:"http://www.w3.org/2000/xmlns/"};function b(t){var n=t+="",e=n.indexOf(":");return e>=0&&"xmlns"!==(n=t.slice(0,e))&&(t=t.slice(e+1)),w.hasOwnProperty(n)?{space:w[n],local:t}:t}function x(t){return function(){this.removeAttribute(t)}}function M(t){return function(){this.removeAttributeNS(t.space,t.local)}}function A(t,n){return function(){this.setAttribute(t,n)}}function N(t,n){return function(){this.setAttributeNS(t.space,t.local,n)}}function E(t,n){return function(){var e=n.apply(this,arguments);null==e?this.removeAttribute(t):this.setAttribute(t,e)}}function k(t,n){return function(){var e=n.apply(this,arguments);null==e?this.removeAttributeNS(t.space,t.local):this.setAttributeNS(t.space,t.local,e)}}function S(t){return t.ownerDocument&&t.ownerDocument.defaultView||t.document&&t||t.defaultView}function C(t){return function(){this.style.removeProperty(t)}}function P(t,n,e){return function(){this.style.setProperty(t,n,e)}}function j(t,n,e){return function(){var r=n.apply(this,arguments);null==r?this.style.removeProperty(t):this.style.setProperty(t,r,e)}}function q(t,n){return t.style.getPropertyValue(n)||S(t).getComputedStyle(t,null).getPropertyValue(n)}function O(t){return function(){delete this[t]}}function L(t,n){return function(){this[t]=n}}function T(t,n){return function(){var e=n.apply(this,arguments);null==e?delete this[t]:this[t]=e}}function B(t){return t.trim().split(/^|\s+/)}function D(t){return t.classList||new H(t)}function H(t){this._node=t,this._names=B(t.getAttribute("class")||"")}function R(t,n){for(var e=D(t),r=-1,i=n.length;++r<i;)e.add(n[r])}function V(t,n){for(var 
e=D(t),r=-1,i=n.length;++r<i;)e.remove(n[r])}function X(t){return function(){R(this,t)}}function z(t){return function(){V(this,t)}}function I(t,n){return function(){(n.apply(this,arguments)?R:V)(this,t)}}function $(){this.textContent=""}function U(t){return function(){this.textContent=t}}function Y(t){return function(){var n=t.apply(this,arguments);this.textContent=null==n?"":n}}function F(){this.innerHTML=""}function Z(t){return function(){this.innerHTML=t}}function G(t){return function(){var n=t.apply(this,arguments);this.innerHTML=null==n?"":n}}function J(){this.nextSibling&&this.parentNode.appendChild(this)}function K(){this.previousSibling&&this.parentNode.insertBefore(this,this.parentNode.firstChild)}function Q(t){return function(){var n=this.ownerDocument,e=this.namespaceURI;return e===_&&n.documentElement.namespaceURI===_?n.createElement(t):n.createElementNS(e,t)}}function W(t){return function(){return this.ownerDocument.createElementNS(t.space,t.local)}}function tt(t){var n=b(t);return(n.local?W:Q)(n)}function nt(){return null}function et(){var t=this.parentNode;t&&t.removeChild(this)}function rt(){var t=this.cloneNode(!1),n=this.parentNode;return n?n.insertBefore(t,this.nextSibling):t}function it(){var t=this.cloneNode(!0),n=this.parentNode;return n?n.insertBefore(t,this.nextSibling):t}function ot(t){return t.trim().split(/^|\s+/).map((function(t){var n="",e=t.indexOf(".");return e>=0&&(n=t.slice(e+1),t=t.slice(0,e)),{type:t,name:n}}))}function ut(t){return function(){var n=this.__on;if(n){for(var e,r=0,i=-1,o=n.length;r<o;++r)e=n[r],t.type&&e.type!==t.type||e.name!==t.name?n[++i]=e:this.removeEventListener(e.type,e.listener,e.options);++i?n.length=i:delete this.__on}}}function at(t,n,e){return function(){var r,i=this.__on,o=function(t){return function(n){t.call(this,n,this.__data__)}}(n);if(i)for(var u=0,a=i.length;u<a;++u)if((r=i[u]).type===t.type&&r.name===t.name)return 
this.removeEventListener(r.type,r.listener,r.options),this.addEventListener(r.type,r.listener=o,r.options=e),void(r.value=n);this.addEventListener(t.type,o,e),r={type:t.type,name:t.name,value:n,listener:o,options:e},i?i.push(r):this.__on=[r]}}function lt(t,n,e){var r=S(t),i=r.CustomEvent;"function"==typeof i?i=new i(n,e):(i=r.document.createEvent("Event"),e?(i.initEvent(n,e.bubbles,e.cancelable),i.detail=e.detail):i.initEvent(n,!1,!1)),t.dispatchEvent(i)}function st(t,n){return function(){return lt(this,t,n)}}function ct(t,n){return function(){return lt(this,t,n.apply(this,arguments))}}H.prototype={add:function(t){this._names.indexOf(t)<0&&(this._names.push(t),this._node.setAttribute("class",this._names.join(" ")))},remove:function(t){var n=this._names.indexOf(t);n>=0&&(this._names.splice(n,1),this._node.setAttribute("class",this._names.join(" ")))},contains:function(t){return this._names.indexOf(t)>=0}};var ft=[null];function ht(t,n){this._groups=t,this._parents=n}function pt(t){return"string"==typeof t?new ht([[document.querySelector(t)]],[document.documentElement]):new ht([[t]],ft)}function dt(){}function gt(t){return null==t?dt:function(){return this.querySelector(t)}}function vt(t){return null==t?[]:Array.isArray(t)?t:Array.from(t)}function yt(){return[]}function mt(t){return null==t?yt:function(){return this.querySelectorAll(t)}}function _t(t){return function(){return this.matches(t)}}function wt(t){return function(n){return n.matches(t)}}ht.prototype=function(){return new ht([[document.documentElement]],ft)}.prototype={constructor:ht,select:function(t){"function"!=typeof t&&(t=r(t));for(var n=this._groups,e=n.length,i=new Array(e),o=0;o<e;++o)for(var u,a,l=n[o],s=l.length,c=i[o]=new Array(s),f=0;f<s;++f)(u=l[f])&&(a=t.call(u,u.__data__,f,l))&&("__data__"in u&&(a.__data__=u.__data__),c[f]=a);return new ht(i,this._parents)},selectAll:function(t){t="function"==typeof t?function(t){return function(){return i(t.apply(this,arguments))}}(t):function(t){return 
null==t?o:function(){return this.querySelectorAll(t)}}(t);for(var n=this._groups,e=n.length,r=[],u=[],a=0;a<e;++a)for(var l,s=n[a],c=s.length,f=0;f<c;++f)(l=s[f])&&(r.push(t.call(l,l.__data__,f,s)),u.push(l));return new ht(r,u)},selectChild:function(t){return this.select(null==t?l:function(t){return function(){return a.call(this.children,t)}}("function"==typeof t?t:u(t)))},selectChildren:function(t){return this.selectAll(null==t?c:function(t){return function(){return s.call(this.children,t)}}("function"==typeof t?t:u(t)))},filter:function(t){"function"!=typeof t&&(t=function(t){return function(){return this.matches(t)}}(t));for(var n=this._groups,e=n.length,r=new Array(e),i=0;i<e;++i)for(var o,u=n[i],a=u.length,l=r[i]=[],s=0;s<a;++s)(o=u[s])&&t.call(o,o.__data__,s,u)&&l.push(o);return new ht(r,this._parents)},data:function(t,n){if(!arguments.length)return Array.from(this,v);var e=n?g:d,r=this._parents,i=this._groups;"function"!=typeof t&&(t=p(t));for(var o=i.length,u=new Array(o),a=new Array(o),l=new Array(o),s=0;s<o;++s){var c=r[s],f=i[s],h=f.length,m=y(t.call(c,c&&c.__data__,s,r)),_=m.length,w=a[s]=new Array(_),b=u[s]=new Array(_),x=l[s]=new Array(h);e(c,f,w,b,x,m,n);for(var M,A,N=0,E=0;N<_;++N)if(M=w[N]){for(N>=E&&(E=N+1);!(A=b[E])&&++E<_;);M._next=A||null}}return(u=new ht(u,r))._enter=a,u._exit=l,u},enter:function(){return new ht(this._enter||this._groups.map(f),this._parents)},exit:function(){return new ht(this._exit||this._groups.map(f),this._parents)},join:function(t,n,e){var r=this.enter(),i=this,o=this.exit();return"function"==typeof t?(r=t(r))&&(r=r.selection()):r=r.append(t+""),null!=n&&(i=n(i))&&(i=i.selection()),null==e?o.remove():e(o),r&&i?r.merge(i).order():i},merge:function(t){for(var n=t.selection?t.selection():t,e=this._groups,r=n._groups,i=e.length,o=r.length,u=Math.min(i,o),a=new Array(i),l=0;l<u;++l)for(var s,c=e[l],f=r[l],h=c.length,p=a[l]=new Array(h),d=0;d<h;++d)(s=c[d]||f[d])&&(p[d]=s);for(;l<i;++l)a[l]=e[l];return new 
ht(a,this._parents)},selection:function(){return this},order:function(){for(var t=this._groups,n=-1,e=t.length;++n<e;)for(var r,i=t[n],o=i.length-1,u=i[o];--o>=0;)(r=i[o])&&(u&&4^r.compareDocumentPosition(u)&&u.parentNode.insertBefore(r,u),u=r);return this},sort:function(t){function n(n,e){return n&&e?t(n.__data__,e.__data__):!n-!e}t||(t=m);for(var e=this._groups,r=e.length,i=new Array(r),o=0;o<r;++o){for(var u,a=e[o],l=a.length,s=i[o]=new Array(l),c=0;c<l;++c)(u=a[c])&&(s[c]=u);s.sort(n)}return new ht(i,this._parents).order()},call:function(){var t=arguments[0];return arguments[0]=this,t.apply(null,arguments),this},nodes:function(){return Array.from(this)},node:function(){for(var t=this._groups,n=0,e=t.length;n<e;++n)for(var r=t[n],i=0,o=r.length;i<o;++i){var u=r[i];if(u)return u}return null},size:function(){let t=0;for(const n of this)++t;return t},empty:function(){return!this.node()},each:function(t){for(var n=this._groups,e=0,r=n.length;e<r;++e)for(var i,o=n[e],u=0,a=o.length;u<a;++u)(i=o[u])&&t.call(i,i.__data__,u,o);return this},attr:function(t,n){var e=b(t);if(arguments.length<2){var r=this.node();return e.local?r.getAttributeNS(e.space,e.local):r.getAttribute(e)}return this.each((null==n?e.local?M:x:"function"==typeof n?e.local?k:E:e.local?N:A)(e,n))},style:function(t,n,e){return arguments.length>1?this.each((null==n?C:"function"==typeof n?j:P)(t,n,null==e?"":e)):q(this.node(),t)},property:function(t,n){return arguments.length>1?this.each((null==n?O:"function"==typeof n?T:L)(t,n)):this.node()[t]},classed:function(t,n){var e=B(t+"");if(arguments.length<2){for(var r=D(this.node()),i=-1,o=e.length;++i<o;)if(!r.contains(e[i]))return!1;return!0}return this.each(("function"==typeof n?I:n?X:z)(e,n))},text:function(t){return arguments.length?this.each(null==t?$:("function"==typeof t?Y:U)(t)):this.node().textContent},html:function(t){return arguments.length?this.each(null==t?F:("function"==typeof t?G:Z)(t)):this.node().innerHTML},raise:function(){return 
this.each(J)},lower:function(){return this.each(K)},append:function(t){var n="function"==typeof t?t:tt(t);return this.select((function(){return this.appendChild(n.apply(this,arguments))}))},insert:function(t,n){var e="function"==typeof t?t:tt(t),i=null==n?nt:"function"==typeof n?n:r(n);return this.select((function(){return this.insertBefore(e.apply(this,arguments),i.apply(this,arguments)||null)}))},remove:function(){return this.each(et)},clone:function(t){return this.select(t?it:rt)},datum:function(t){return arguments.length?this.property("__data__",t):this.node().__data__},on:function(t,n,e){var r,i,o=ot(t+""),u=o.length;if(!(arguments.length<2)){for(a=n?at:ut,r=0;r<u;++r)this.each(a(o[r],n,e));return this}var a=this.node().__on;if(a)for(var l,s=0,c=a.length;s<c;++s)for(r=0,l=a[s];r<u;++r)if((i=o[r]).type===l.type&&i.name===l.name)return l.value},dispatch:function(t,n){return this.each(("function"==typeof n?ct:st)(t,n))},[Symbol.iterator]:function*(){for(var t=this._groups,n=0,e=t.length;n<e;++n)for(var r,i=t[n],o=0,u=i.length;o<u;++o)(r=i[o])&&(yield r)}};var bt=Array.prototype.find;function xt(){return this.firstElementChild}var Mt=Array.prototype.filter;function At(){return Array.from(this.children)}function Nt(t){return new Array(t.length)}function Et(t,n){this.ownerDocument=t.ownerDocument,this.namespaceURI=t.namespaceURI,this._next=null,this._parent=t,this.__data__=n}function kt(t){return function(){return t}}function St(t,n,e,r,i,o){for(var u,a=0,l=n.length,s=o.length;a<s;++a)(u=n[a])?(u.__data__=o[a],r[a]=u):e[a]=new Et(t,o[a]);for(;a<l;++a)(u=n[a])&&(i[a]=u)}function Ct(t,n,e,r,i,o,u){var a,l,s,c=new Map,f=n.length,h=o.length,p=new Array(f);for(a=0;a<f;++a)(l=n[a])&&(p[a]=s=u.call(l,l.__data__,a,n)+"",c.has(s)?i[a]=l:c.set(s,l));for(a=0;a<h;++a)s=u.call(t,o[a],a,o)+"",(l=c.get(s))?(r[a]=l,l.__data__=o[a],c.delete(s)):e[a]=new Et(t,o[a]);for(a=0;a<f;++a)(l=n[a])&&c.get(p[a])===l&&(i[a]=l)}function Pt(t){return t.__data__}function 
jt(t){return"object"==typeof t&&"length"in t?t:Array.from(t)}function qt(t,n){return t<n?-1:t>n?1:t>=n?0:NaN}Et.prototype={constructor:Et,appendChild:function(t){return this._parent.insertBefore(t,this._next)},insertBefore:function(t,n){return this._parent.insertBefore(t,n)},querySelector:function(t){return this._parent.querySelector(t)},querySelectorAll:function(t){return this._parent.querySelectorAll(t)}};var Ot="http://www.w3.org/1999/xhtml";const Lt={svg:"http://www.w3.org/2000/svg",xhtml:Ot,xlink:"http://www.w3.org/1999/xlink",xml:"http://www.w3.org/XML/1998/namespace",xmlns:"http://www.w3.org/2000/xmlns/"};function Tt(t){var n=t+="",e=n.indexOf(":");return e>=0&&"xmlns"!==(n=t.slice(0,e))&&(t=t.slice(e+1)),Lt.hasOwnProperty(n)?{space:Lt[n],local:t}:t}function Bt(t){return function(){this.removeAttribute(t)}}function Dt(t){return function(){this.removeAttributeNS(t.space,t.local)}}function Ht(t,n){return function(){this.setAttribute(t,n)}}function Rt(t,n){return function(){this.setAttributeNS(t.space,t.local,n)}}function Vt(t,n){return function(){var e=n.apply(this,arguments);null==e?this.removeAttribute(t):this.setAttribute(t,e)}}function Xt(t,n){return function(){var e=n.apply(this,arguments);null==e?this.removeAttributeNS(t.space,t.local):this.setAttributeNS(t.space,t.local,e)}}function zt(t){return t.ownerDocument&&t.ownerDocument.defaultView||t.document&&t||t.defaultView}function It(t){return function(){this.style.removeProperty(t)}}function $t(t,n,e){return function(){this.style.setProperty(t,n,e)}}function Ut(t,n,e){return function(){var r=n.apply(this,arguments);null==r?this.style.removeProperty(t):this.style.setProperty(t,r,e)}}function Yt(t,n){return t.style.getPropertyValue(n)||zt(t).getComputedStyle(t,null).getPropertyValue(n)}function Ft(t){return function(){delete this[t]}}function Zt(t,n){return function(){this[t]=n}}function Gt(t,n){return function(){var e=n.apply(this,arguments);null==e?delete this[t]:this[t]=e}}function Jt(t){return 
t.trim().split(/^|\s+/)}function Kt(t){return t.classList||new Qt(t)}function Qt(t){this._node=t,this._names=Jt(t.getAttribute("class")||"")}function Wt(t,n){for(var e=Kt(t),r=-1,i=n.length;++r<i;)e.add(n[r])}function tn(t,n){for(var e=Kt(t),r=-1,i=n.length;++r<i;)e.remove(n[r])}function nn(t){return function(){Wt(this,t)}}function en(t){return function(){tn(this,t)}}function rn(t,n){return function(){(n.apply(this,arguments)?Wt:tn)(this,t)}}function on(){this.textContent=""}function un(t){return function(){this.textContent=t}}function an(t){return function(){var n=t.apply(this,arguments);this.textContent=null==n?"":n}}function ln(){this.innerHTML=""}function sn(t){return function(){this.innerHTML=t}}function cn(t){return function(){var n=t.apply(this,arguments);this.innerHTML=null==n?"":n}}function fn(){this.nextSibling&&this.parentNode.appendChild(this)}function hn(){this.previousSibling&&this.parentNode.insertBefore(this,this.parentNode.firstChild)}function pn(t){return function(){var n=this.ownerDocument,e=this.namespaceURI;return e===Ot&&n.documentElement.namespaceURI===Ot?n.createElement(t):n.createElementNS(e,t)}}function dn(t){return function(){return this.ownerDocument.createElementNS(t.space,t.local)}}function gn(t){var n=Tt(t);return(n.local?dn:pn)(n)}function vn(){return null}function yn(){var t=this.parentNode;t&&t.removeChild(this)}function mn(){var t=this.cloneNode(!1),n=this.parentNode;return n?n.insertBefore(t,this.nextSibling):t}function _n(){var t=this.cloneNode(!0),n=this.parentNode;return n?n.insertBefore(t,this.nextSibling):t}function wn(t){return t.trim().split(/^|\s+/).map((function(t){var n="",e=t.indexOf(".");return e>=0&&(n=t.slice(e+1),t=t.slice(0,e)),{type:t,name:n}}))}function bn(t){return function(){var n=this.__on;if(n){for(var e,r=0,i=-1,o=n.length;r<o;++r)e=n[r],t.type&&e.type!==t.type||e.name!==t.name?n[++i]=e:this.removeEventListener(e.type,e.listener,e.options);++i?n.length=i:delete this.__on}}}function xn(t,n,e){return 
function(){var r,i=this.__on,o=function(t){return function(n){t.call(this,n,this.__data__)}}(n);if(i)for(var u=0,a=i.length;u<a;++u)if((r=i[u]).type===t.type&&r.name===t.name)return this.removeEventListener(r.type,r.listener,r.options),this.addEventListener(r.type,r.listener=o,r.options=e),void(r.value=n);this.addEventListener(t.type,o,e),r={type:t.type,name:t.name,value:n,listener:o,options:e},i?i.push(r):this.__on=[r]}}function Mn(t,n,e){var r=zt(t),i=r.CustomEvent;"function"==typeof i?i=new i(n,e):(i=r.document.createEvent("Event"),e?(i.initEvent(n,e.bubbles,e.cancelable),i.detail=e.detail):i.initEvent(n,!1,!1)),t.dispatchEvent(i)}function An(t,n){return function(){return Mn(this,t,n)}}function Nn(t,n){return function(){return Mn(this,t,n.apply(this,arguments))}}Qt.prototype={add:function(t){this._names.indexOf(t)<0&&(this._names.push(t),this._node.setAttribute("class",this._names.join(" ")))},remove:function(t){var n=this._names.indexOf(t);n>=0&&(this._names.splice(n,1),this._node.setAttribute("class",this._names.join(" ")))},contains:function(t){return this._names.indexOf(t)>=0}};var En=[null];function kn(t,n){this._groups=t,this._parents=n}function Sn(){return new kn([[document.documentElement]],En)}kn.prototype=Sn.prototype={constructor:kn,select:function(t){"function"!=typeof t&&(t=gt(t));for(var n=this._groups,e=n.length,r=new Array(e),i=0;i<e;++i)for(var o,u,a=n[i],l=a.length,s=r[i]=new Array(l),c=0;c<l;++c)(o=a[c])&&(u=t.call(o,o.__data__,c,a))&&("__data__"in o&&(u.__data__=o.__data__),s[c]=u);return new kn(r,this._parents)},selectAll:function(t){t="function"==typeof t?function(t){return function(){return vt(t.apply(this,arguments))}}(t):mt(t);for(var n=this._groups,e=n.length,r=[],i=[],o=0;o<e;++o)for(var u,a=n[o],l=a.length,s=0;s<l;++s)(u=a[s])&&(r.push(t.call(u,u.__data__,s,a)),i.push(u));return new kn(r,i)},selectChild:function(t){return this.select(null==t?xt:function(t){return function(){return bt.call(this.children,t)}}("function"==typeof 
t?t:wt(t)))},selectChildren:function(t){return this.selectAll(null==t?At:function(t){return function(){return Mt.call(this.children,t)}}("function"==typeof t?t:wt(t)))},filter:function(t){"function"!=typeof t&&(t=_t(t));for(var n=this._groups,e=n.length,r=new Array(e),i=0;i<e;++i)for(var o,u=n[i],a=u.length,l=r[i]=[],s=0;s<a;++s)(o=u[s])&&t.call(o,o.__data__,s,u)&&l.push(o);return new kn(r,this._parents)},data:function(t,n){if(!arguments.length)return Array.from(this,Pt);var e=n?Ct:St,r=this._parents,i=this._groups;"function"!=typeof t&&(t=kt(t));for(var o=i.length,u=new Array(o),a=new Array(o),l=new Array(o),s=0;s<o;++s){var c=r[s],f=i[s],h=f.length,p=jt(t.call(c,c&&c.__data__,s,r)),d=p.length,g=a[s]=new Array(d),v=u[s]=new Array(d),y=l[s]=new Array(h);e(c,f,g,v,y,p,n);for(var m,_,w=0,b=0;w<d;++w)if(m=g[w]){for(w>=b&&(b=w+1);!(_=v[b])&&++b<d;);m._next=_||null}}return(u=new kn(u,r))._enter=a,u._exit=l,u},enter:function(){return new kn(this._enter||this._groups.map(Nt),this._parents)},exit:function(){return new kn(this._exit||this._groups.map(Nt),this._parents)},join:function(t,n,e){var r=this.enter(),i=this,o=this.exit();return"function"==typeof t?(r=t(r))&&(r=r.selection()):r=r.append(t+""),null!=n&&(i=n(i))&&(i=i.selection()),null==e?o.remove():e(o),r&&i?r.merge(i).order():i},merge:function(t){for(var n=t.selection?t.selection():t,e=this._groups,r=n._groups,i=e.length,o=r.length,u=Math.min(i,o),a=new Array(i),l=0;l<u;++l)for(var s,c=e[l],f=r[l],h=c.length,p=a[l]=new Array(h),d=0;d<h;++d)(s=c[d]||f[d])&&(p[d]=s);for(;l<i;++l)a[l]=e[l];return new kn(a,this._parents)},selection:function(){return this},order:function(){for(var t=this._groups,n=-1,e=t.length;++n<e;)for(var r,i=t[n],o=i.length-1,u=i[o];--o>=0;)(r=i[o])&&(u&&4^r.compareDocumentPosition(u)&&u.parentNode.insertBefore(r,u),u=r);return this},sort:function(t){function n(n,e){return n&&e?t(n.__data__,e.__data__):!n-!e}t||(t=qt);for(var e=this._groups,r=e.length,i=new Array(r),o=0;o<r;++o){for(var 
u,a=e[o],l=a.length,s=i[o]=new Array(l),c=0;c<l;++c)(u=a[c])&&(s[c]=u);s.sort(n)}return new kn(i,this._parents).order()},call:function(){var t=arguments[0];return arguments[0]=this,t.apply(null,arguments),this},nodes:function(){return Array.from(this)},node:function(){for(var t=this._groups,n=0,e=t.length;n<e;++n)for(var r=t[n],i=0,o=r.length;i<o;++i){var u=r[i];if(u)return u}return null},size:function(){let t=0;for(const n of this)++t;return t},empty:function(){return!this.node()},each:function(t){for(var n=this._groups,e=0,r=n.length;e<r;++e)for(var i,o=n[e],u=0,a=o.length;u<a;++u)(i=o[u])&&t.call(i,i.__data__,u,o);return this},attr:function(t,n){var e=Tt(t);if(arguments.length<2){var r=this.node();return e.local?r.getAttributeNS(e.space,e.local):r.getAttribute(e)}return this.each((null==n?e.local?Dt:Bt:"function"==typeof n?e.local?Xt:Vt:e.local?Rt:Ht)(e,n))},style:function(t,n,e){return arguments.length>1?this.each((null==n?It:"function"==typeof n?Ut:$t)(t,n,null==e?"":e)):Yt(this.node(),t)},property:function(t,n){return arguments.length>1?this.each((null==n?Ft:"function"==typeof n?Gt:Zt)(t,n)):this.node()[t]},classed:function(t,n){var e=Jt(t+"");if(arguments.length<2){for(var r=Kt(this.node()),i=-1,o=e.length;++i<o;)if(!r.contains(e[i]))return!1;return!0}return this.each(("function"==typeof n?rn:n?nn:en)(e,n))},text:function(t){return arguments.length?this.each(null==t?on:("function"==typeof t?an:un)(t)):this.node().textContent},html:function(t){return arguments.length?this.each(null==t?ln:("function"==typeof t?cn:sn)(t)):this.node().innerHTML},raise:function(){return this.each(fn)},lower:function(){return this.each(hn)},append:function(t){var n="function"==typeof t?t:gn(t);return this.select((function(){return this.appendChild(n.apply(this,arguments))}))},insert:function(t,n){var e="function"==typeof t?t:gn(t),r=null==n?vn:"function"==typeof n?n:gt(n);return this.select((function(){return 
this.insertBefore(e.apply(this,arguments),r.apply(this,arguments)||null)}))},remove:function(){return this.each(yn)},clone:function(t){return this.select(t?_n:mn)},datum:function(t){return arguments.length?this.property("__data__",t):this.node().__data__},on:function(t,n,e){var r,i,o=wn(t+""),u=o.length;if(!(arguments.length<2)){for(a=n?xn:bn,r=0;r<u;++r)this.each(a(o[r],n,e));return this}var a=this.node().__on;if(a)for(var l,s=0,c=a.length;s<c;++s)for(r=0,l=a[s];r<u;++r)if((i=o[r]).type===l.type&&i.name===l.name)return l.value},dispatch:function(t,n){return this.each(("function"==typeof n?Nn:An)(t,n))},[Symbol.iterator]:function*(){for(var t=this._groups,n=0,e=t.length;n<e;++n)for(var r,i=t[n],o=0,u=i.length;o<u;++o)(r=i[o])&&(yield r)}};const Cn=Sn;function Pn(t){return"string"==typeof t?new kn([[document.querySelector(t)]],[document.documentElement]):new kn([[t]],En)}function jn(t,n){if((e=(t=n?t.toExponential(n-1):t.toExponential()).indexOf("e"))<0)return null;var e,r=t.slice(0,e);return[r.length>1?r[0]+r.slice(2):r,+t.slice(e+1)]}function qn(t){return(t=jn(Math.abs(t)))?t[1]:NaN}var On,Ln=/^(?:(.)?([<>=^]))?([+\-( ])?([$#])?(0)?(\d+)?(,)?(\.\d+)?(~)?([a-z%])?$/i;function Tn(t){if(!(n=Ln.exec(t)))throw new Error("invalid format: "+t);var n;return new Bn({fill:n[1],align:n[2],sign:n[3],symbol:n[4],zero:n[5],width:n[6],comma:n[7],precision:n[8]&&n[8].slice(1),trim:n[9],type:n[10]})}function Bn(t){this.fill=void 0===t.fill?" 
":t.fill+"",this.align=void 0===t.align?">":t.align+"",this.sign=void 0===t.sign?"-":t.sign+"",this.symbol=void 0===t.symbol?"":t.symbol+"",this.zero=!!t.zero,this.width=void 0===t.width?void 0:+t.width,this.comma=!!t.comma,this.precision=void 0===t.precision?void 0:+t.precision,this.trim=!!t.trim,this.type=void 0===t.type?"":t.type+""}function Dn(t,n){var e=jn(t,n);if(!e)return t+"";var r=e[0],i=e[1];return i<0?"0."+new Array(-i).join("0")+r:r.length>i+1?r.slice(0,i+1)+"."+r.slice(i+1):r+new Array(i-r.length+2).join("0")}Tn.prototype=Bn.prototype,Bn.prototype.toString=function(){return this.fill+this.align+this.sign+this.symbol+(this.zero?"0":"")+(void 0===this.width?"":Math.max(1,0|this.width))+(this.comma?",":"")+(void 0===this.precision?"":"."+Math.max(0,0|this.precision))+(this.trim?"~":"")+this.type};const Hn={"%":(t,n)=>(100*t).toFixed(n),b:t=>Math.round(t).toString(2),c:t=>t+"",d:function(t){return Math.abs(t=Math.round(t))>=1e21?t.toLocaleString("en").replace(/,/g,""):t.toString(10)},e:(t,n)=>t.toExponential(n),f:(t,n)=>t.toFixed(n),g:(t,n)=>t.toPrecision(n),o:t=>Math.round(t).toString(8),p:(t,n)=>Dn(100*t,n),r:Dn,s:function(t,n){var e=jn(t,n);if(!e)return t+"";var r=e[0],i=e[1],o=i-(On=3*Math.max(-8,Math.min(8,Math.floor(i/3))))+1,u=r.length;return o===u?r:o>u?r+new Array(o-u+1).join("0"):o>0?r.slice(0,o)+"."+r.slice(o):"0."+new Array(1-o).join("0")+jn(t,Math.max(0,n+o-1))[0]},X:t=>Math.round(t).toString(16).toUpperCase(),x:t=>Math.round(t).toString(16)};function Rn(t){return t}var Vn,Xn,zn,In=Array.prototype.map,$n=["y","z","a","f","p","n","µ","m","","k","M","G","T","P","E","Z","Y"];function Un(t,n){return null==t||null==n?NaN:t<n?-1:t>n?1:t>=n?0:NaN}function Yn(t){t.x0=Math.round(t.x0),t.y0=Math.round(t.y0),t.x1=Math.round(t.x1),t.y1=Math.round(t.y1)}function Fn(t){var n=0,e=t.children,r=e&&e.length;if(r)for(;--r>=0;)n+=e[r].value;else n=1;t.value=n}function Zn(t,n){t instanceof Map?(t=[void 0,t],void 0===n&&(n=Jn)):void 0===n&&(n=Gn);for(var 
e,r,i,o,u,a=new Wn(t),l=[a];e=l.pop();)if((i=n(e.data))&&(u=(i=Array.from(i)).length))for(e.children=i,o=u-1;o>=0;--o)l.push(r=i[o]=new Wn(i[o])),r.parent=e,r.depth=e.depth+1;return a.eachBefore(Qn)}function Gn(t){return t.children}function Jn(t){return Array.isArray(t)?t[1]:null}function Kn(t){void 0!==t.data.value&&(t.value=t.data.value),t.data=t.data.data}function Qn(t){var n=0;do{t.height=n}while((t=t.parent)&&t.height<++n)}function Wn(t){this.data=t,this.depth=this.height=0,this.parent=null}Vn=function(t){var n,e,r=void 0===t.grouping||void 0===t.thousands?Rn:(n=In.call(t.grouping,Number),e=t.thousands+"",function(t,r){for(var i=t.length,o=[],u=0,a=n[0],l=0;i>0&&a>0&&(l+a+1>r&&(a=Math.max(1,r-l)),o.push(t.substring(i-=a,i+a)),!((l+=a+1)>r));)a=n[u=(u+1)%n.length];return o.reverse().join(e)}),i=void 0===t.currency?"":t.currency[0]+"",o=void 0===t.currency?"":t.currency[1]+"",u=void 0===t.decimal?".":t.decimal+"",a=void 0===t.numerals?Rn:function(t){return function(n){return n.replace(/[0-9]/g,(function(n){return t[+n]}))}}(In.call(t.numerals,String)),l=void 0===t.percent?"%":t.percent+"",s=void 0===t.minus?"−":t.minus+"",c=void 0===t.nan?"NaN":t.nan+"";function f(t){var n=(t=Tn(t)).fill,e=t.align,f=t.sign,h=t.symbol,p=t.zero,d=t.width,g=t.comma,v=t.precision,y=t.trim,m=t.type;"n"===m?(g=!0,m="g"):Hn[m]||(void 0===v&&(v=12),y=!0,m="g"),(p||"0"===n&&"="===e)&&(p=!0,n="0",e="=");var _="$"===h?i:"#"===h&&/[boxX]/.test(m)?"0"+m.toLowerCase():"",w="$"===h?o:/[%p]/.test(m)?l:"",b=Hn[m],x=/[defgprs%]/.test(m);function M(t){var i,o,l,h=_,M=w;if("c"===m)M=b(t)+M,t="";else{var A=(t=+t)<0||1/t<0;if(t=isNaN(t)?c:b(Math.abs(t),v),y&&(t=function(t){t:for(var n,e=t.length,r=1,i=-1;r<e;++r)switch(t[r]){case".":i=n=r;break;case"0":0===i&&(i=r),n=r;break;default:if(!+t[r])break t;i>0&&(i=0)}return 
i>0?t.slice(0,i)+t.slice(n+1):t}(t)),A&&0==+t&&"+"!==f&&(A=!1),h=(A?"("===f?f:s:"-"===f||"("===f?"":f)+h,M=("s"===m?$n[8+On/3]:"")+M+(A&&"("===f?")":""),x)for(i=-1,o=t.length;++i<o;)if(48>(l=t.charCodeAt(i))||l>57){M=(46===l?u+t.slice(i+1):t.slice(i))+M,t=t.slice(0,i);break}}g&&!p&&(t=r(t,1/0));var N=h.length+t.length+M.length,E=N<d?new Array(d-N+1).join(n):"";switch(g&&p&&(t=r(E+t,E.length?d-M.length:1/0),E=""),e){case"<":t=h+t+M+E;break;case"=":t=h+E+t+M;break;case"^":t=E.slice(0,N=E.length>>1)+h+t+M+E.slice(N);break;default:t=E+h+t+M}return a(t)}return v=void 0===v?6:/[gprs]/.test(m)?Math.max(1,Math.min(21,v)):Math.max(0,Math.min(20,v)),M.toString=function(){return t+""},M}return{format:f,formatPrefix:function(t,n){var e=f(((t=Tn(t)).type="f",t)),r=3*Math.max(-8,Math.min(8,Math.floor(qn(n)/3))),i=Math.pow(10,-r),o=$n[8+r/3];return function(t){return e(i*t)+o}}}}({thousands:",",grouping:[3],currency:["$",""]}),Xn=Vn.format,zn=Vn.formatPrefix,Wn.prototype=Zn.prototype={constructor:Wn,count:function(){return this.eachAfter(Fn)},each:function(t,n){let e=-1;for(const r of this)t.call(n,r,++e,this);return this},eachAfter:function(t,n){for(var e,r,i,o=this,u=[o],a=[],l=-1;o=u.pop();)if(a.push(o),e=o.children)for(r=0,i=e.length;r<i;++r)u.push(e[r]);for(;o=a.pop();)t.call(n,o,++l,this);return this},eachBefore:function(t,n){for(var e,r,i=this,o=[i],u=-1;i=o.pop();)if(t.call(n,i,++u,this),e=i.children)for(r=e.length-1;r>=0;--r)o.push(e[r]);return this},find:function(t,n){let e=-1;for(const r of this)if(t.call(n,r,++e,this))return r},sum:function(t){return this.eachAfter((function(n){for(var e=+t(n.data)||0,r=n.children,i=r&&r.length;--i>=0;)e+=r[i].value;n.value=e}))},sort:function(t){return this.eachBefore((function(n){n.children&&n.children.sort(t)}))},path:function(t){for(var n=this,e=function(t,n){if(t===n)return t;var e=t.ancestors(),r=n.ancestors(),i=null;for(t=e.pop(),n=r.pop();t===n;)i=t,t=e.pop(),n=r.pop();return i}(n,t),r=[n];n!==e;)n=n.parent,r.push(n);for(var 
i=r.length;t!==e;)r.splice(i,0,t),t=t.parent;return r},ancestors:function(){for(var t=this,n=[t];t=t.parent;)n.push(t);return n},descendants:function(){return Array.from(this)},leaves:function(){var t=[];return this.eachBefore((function(n){n.children||t.push(n)})),t},links:function(){var t=this,n=[];return t.each((function(e){e!==t&&n.push({source:e.parent,target:e})})),n},copy:function(){return Zn(this).eachBefore(Kn)},[Symbol.iterator]:function*(){var t,n,e,r,i=this,o=[i];do{for(t=o.reverse(),o=[];i=t.pop();)if(yield i,n=i.children)for(e=0,r=n.length;e<r;++e)o.push(n[e])}while(o.length)}};var te=Math.sqrt(50),ne=Math.sqrt(10),ee=Math.sqrt(2);function re(t,n,e){var r=(n-t)/Math.max(0,e),i=Math.floor(Math.log(r)/Math.LN10),o=r/Math.pow(10,i);return i>=0?(o>=te?10:o>=ne?5:o>=ee?2:1)*Math.pow(10,i):-Math.pow(10,-i)/(o>=te?10:o>=ne?5:o>=ee?2:1)}function ie(t){let n=t,e=t,r=t;function i(t,n,i=0,o=t.length){if(i<o){if(0!==e(n,n))return o;do{const e=i+o>>>1;r(t[e],n)<0?i=e+1:o=e}while(i<o)}return i}return 2!==t.length&&(n=(n,e)=>t(n)-e,e=Un,r=(n,e)=>Un(t(n),e)),{left:i,center:function(t,e,r=0,o=t.length){const u=i(t,e,r,o-1);return u>r&&n(t[u-1],e)>-n(t[u],e)?u-1:u},right:function(t,n,i=0,o=t.length){if(i<o){if(0!==e(n,n))return o;do{const e=i+o>>>1;r(t[e],n)<=0?i=e+1:o=e}while(i<o)}return i}}}const oe=ie(Un),ue=oe.right,ae=(oe.left,ie((function(t){return null===t?NaN:+t})).center,ue);function le(t,n,e){t.prototype=n.prototype=e,e.constructor=t}function se(t,n){var e=Object.create(t.prototype);for(var r in n)e[r]=n[r];return e}function ce(){}var fe=.7,he=1/fe,pe="\\s*([+-]?\\d+)\\s*",de="\\s*([+-]?\\d*\\.?\\d+(?:[eE][+-]?\\d+)?)\\s*",ge="\\s*([+-]?\\d*\\.?\\d+(?:[eE][+-]?\\d+)?)%\\s*",ve=/^#([0-9a-f]{3,8})$/,ye=new RegExp("^rgb\\("+[pe,pe,pe]+"\\)$"),me=new RegExp("^rgb\\("+[ge,ge,ge]+"\\)$"),_e=new RegExp("^rgba\\("+[pe,pe,pe,de]+"\\)$"),we=new RegExp("^rgba\\("+[ge,ge,ge,de]+"\\)$"),be=new RegExp("^hsl\\("+[de,ge,ge]+"\\)$"),xe=new 
RegExp("^hsla\\("+[de,ge,ge,de]+"\\)$"),Me={aliceblue:15792383,antiquewhite:16444375,aqua:65535,aquamarine:8388564,azure:15794175,beige:16119260,bisque:16770244,black:0,blanchedalmond:16772045,blue:255,blueviolet:9055202,brown:10824234,burlywood:14596231,cadetblue:6266528,chartreuse:8388352,chocolate:13789470,coral:16744272,cornflowerblue:6591981,cornsilk:16775388,crimson:14423100,cyan:65535,darkblue:139,darkcyan:35723,darkgoldenrod:12092939,darkgray:11119017,darkgreen:25600,darkgrey:11119017,darkkhaki:12433259,darkmagenta:9109643,darkolivegreen:5597999,darkorange:16747520,darkorchid:10040012,darkred:9109504,darksalmon:15308410,darkseagreen:9419919,darkslateblue:4734347,darkslategray:3100495,darkslategrey:3100495,darkturquoise:52945,darkviolet:9699539,deeppink:16716947,deepskyblue:49151,dimgray:6908265,dimgrey:6908265,dodgerblue:2003199,firebrick:11674146,floralwhite:16775920,forestgreen:2263842,fuchsia:16711935,gainsboro:14474460,ghostwhite:16316671,gold:16766720,goldenrod:14329120,gray:8421504,green:32768,greenyellow:11403055,grey:8421504,honeydew:15794160,hotpink:16738740,indianred:13458524,indigo:4915330,ivory:16777200,khaki:15787660,lavender:15132410,lavenderblush:16773365,lawngreen:8190976,lemonchiffon:16775885,lightblue:11393254,lightcoral:15761536,lightcyan:14745599,lightgoldenrodyellow:16448210,lightgray:13882323,lightgreen:9498256,lightgrey:13882323,lightpink:16758465,lightsalmon:16752762,lightseagreen:2142890,lightskyblue:8900346,lightslategray:7833753,lightslategrey:7833753,lightsteelblue:11584734,lightyellow:16777184,lime:65280,limegreen:3329330,linen:16445670,magenta:16711935,maroon:8388608,mediumaquamarine:6737322,mediumblue:205,mediumorchid:12211667,mediumpurple:9662683,mediumseagreen:3978097,mediumslateblue:8087790,mediumspringgreen:64154,mediumturquoise:4772300,mediumvioletred:13047173,midnightblue:1644912,mintcream:16121850,mistyrose:16770273,moccasin:16770229,navajowhite:16768685,navy:128,oldlace:16643558,olive:8421376,olivedrab:7048739,orange:16
753920,orangered:16729344,orchid:14315734,palegoldenrod:15657130,palegreen:10025880,paleturquoise:11529966,palevioletred:14381203,papayawhip:16773077,peachpuff:16767673,peru:13468991,pink:16761035,plum:14524637,powderblue:11591910,purple:8388736,rebeccapurple:6697881,red:16711680,rosybrown:12357519,royalblue:4286945,saddlebrown:9127187,salmon:16416882,sandybrown:16032864,seagreen:3050327,seashell:16774638,sienna:10506797,silver:12632256,skyblue:8900331,slateblue:6970061,slategray:7372944,slategrey:7372944,snow:16775930,springgreen:65407,steelblue:4620980,tan:13808780,teal:32896,thistle:14204888,tomato:16737095,turquoise:4251856,violet:15631086,wheat:16113331,white:16777215,whitesmoke:16119285,yellow:16776960,yellowgreen:10145074};function Ae(){return this.rgb().formatHex()}function Ne(){return this.rgb().formatRgb()}function Ee(t){var n,e;return t=(t+"").trim().toLowerCase(),(n=ve.exec(t))?(e=n[1].length,n=parseInt(n[1],16),6===e?ke(n):3===e?new je(n>>8&15|n>>4&240,n>>4&15|240&n,(15&n)<<4|15&n,1):8===e?Se(n>>24&255,n>>16&255,n>>8&255,(255&n)/255):4===e?Se(n>>12&15|n>>8&240,n>>8&15|n>>4&240,n>>4&15|240&n,((15&n)<<4|15&n)/255):null):(n=ye.exec(t))?new je(n[1],n[2],n[3],1):(n=me.exec(t))?new je(255*n[1]/100,255*n[2]/100,255*n[3]/100,1):(n=_e.exec(t))?Se(n[1],n[2],n[3],n[4]):(n=we.exec(t))?Se(255*n[1]/100,255*n[2]/100,255*n[3]/100,n[4]):(n=be.exec(t))?Te(n[1],n[2]/100,n[3]/100,1):(n=xe.exec(t))?Te(n[1],n[2]/100,n[3]/100,n[4]):Me.hasOwnProperty(t)?ke(Me[t]):"transparent"===t?new je(NaN,NaN,NaN,0):null}function ke(t){return new je(t>>16&255,t>>8&255,255&t,1)}function Se(t,n,e,r){return r<=0&&(t=n=e=NaN),new je(t,n,e,r)}function Ce(t){return t instanceof ce||(t=Ee(t)),t?new je((t=t.rgb()).r,t.g,t.b,t.opacity):new je}function Pe(t,n,e,r){return 1===arguments.length?Ce(t):new je(t,n,e,null==r?1:r)}function je(t,n,e,r){this.r=+t,this.g=+n,this.b=+e,this.opacity=+r}function qe(){return"#"+Le(this.r)+Le(this.g)+Le(this.b)}function Oe(){var 
t=this.opacity;return(1===(t=isNaN(t)?1:Math.max(0,Math.min(1,t)))?"rgb(":"rgba(")+Math.max(0,Math.min(255,Math.round(this.r)||0))+", "+Math.max(0,Math.min(255,Math.round(this.g)||0))+", "+Math.max(0,Math.min(255,Math.round(this.b)||0))+(1===t?")":", "+t+")")}function Le(t){return((t=Math.max(0,Math.min(255,Math.round(t)||0)))<16?"0":"")+t.toString(16)}function Te(t,n,e,r){return r<=0?t=n=e=NaN:e<=0||e>=1?t=n=NaN:n<=0&&(t=NaN),new De(t,n,e,r)}function Be(t){if(t instanceof De)return new De(t.h,t.s,t.l,t.opacity);if(t instanceof ce||(t=Ee(t)),!t)return new De;if(t instanceof De)return t;var n=(t=t.rgb()).r/255,e=t.g/255,r=t.b/255,i=Math.min(n,e,r),o=Math.max(n,e,r),u=NaN,a=o-i,l=(o+i)/2;return a?(u=n===o?(e-r)/a+6*(e<r):e===o?(r-n)/a+2:(n-e)/a+4,a/=l<.5?o+i:2-o-i,u*=60):a=l>0&&l<1?0:u,new De(u,a,l,t.opacity)}function De(t,n,e,r){this.h=+t,this.s=+n,this.l=+e,this.opacity=+r}function He(t,n,e){return 255*(t<60?n+(e-n)*t/60:t<180?e:t<240?n+(e-n)*(240-t)/60:n)}function Re(t,n,e,r,i){var o=t*t,u=o*t;return((1-3*t+3*o-u)*n+(4-6*o+3*u)*e+(1+3*t+3*o-3*u)*r+u*i)/6}function Ve(t){return function(){return t}}function Xe(t,n){var e=n-t;return e?function(t,n){return function(e){return t+e*n}}(t,e):Ve(isNaN(t)?n:t)}le(ce,Ee,{copy:function(t){return Object.assign(new this.constructor,this,t)},displayable:function(){return this.rgb().displayable()},hex:Ae,formatHex:Ae,formatHsl:function(){return Be(this).formatHsl()},formatRgb:Ne,toString:Ne}),le(je,Pe,se(ce,{brighter:function(t){return t=null==t?he:Math.pow(he,t),new je(this.r*t,this.g*t,this.b*t,this.opacity)},darker:function(t){return t=null==t?fe:Math.pow(fe,t),new je(this.r*t,this.g*t,this.b*t,this.opacity)},rgb:function(){return this},displayable:function(){return-.5<=this.r&&this.r<255.5&&-.5<=this.g&&this.g<255.5&&-.5<=this.b&&this.b<255.5&&0<=this.opacity&&this.opacity<=1},hex:qe,formatHex:qe,formatRgb:Oe,toString:Oe})),le(De,(function(t,n,e,r){return 1===arguments.length?Be(t):new 
De(t,n,e,null==r?1:r)}),se(ce,{brighter:function(t){return t=null==t?he:Math.pow(he,t),new De(this.h,this.s,this.l*t,this.opacity)},darker:function(t){return t=null==t?fe:Math.pow(fe,t),new De(this.h,this.s,this.l*t,this.opacity)},rgb:function(){var t=this.h%360+360*(this.h<0),n=isNaN(t)||isNaN(this.s)?0:this.s,e=this.l,r=e+(e<.5?e:1-e)*n,i=2*e-r;return new je(He(t>=240?t-240:t+120,i,r),He(t,i,r),He(t<120?t+240:t-120,i,r),this.opacity)},displayable:function(){return(0<=this.s&&this.s<=1||isNaN(this.s))&&0<=this.l&&this.l<=1&&0<=this.opacity&&this.opacity<=1},formatHsl:function(){var t=this.opacity;return(1===(t=isNaN(t)?1:Math.max(0,Math.min(1,t)))?"hsl(":"hsla(")+(this.h||0)+", "+100*(this.s||0)+"%, "+100*(this.l||0)+"%"+(1===t?")":", "+t+")")}}));const ze=function t(n){var e=function(t){return 1==(t=+t)?Xe:function(n,e){return e-n?function(t,n,e){return t=Math.pow(t,e),n=Math.pow(n,e)-t,e=1/e,function(r){return Math.pow(t+r*n,e)}}(n,e,t):Ve(isNaN(n)?e:n)}}(n);function r(t,n){var r=e((t=Pe(t)).r,(n=Pe(n)).r),i=e(t.g,n.g),o=e(t.b,n.b),u=Xe(t.opacity,n.opacity);return function(n){return t.r=r(n),t.g=i(n),t.b=o(n),t.opacity=u(n),t+""}}return r.gamma=t,r}(1);function Ie(t){return function(n){var e,r,i=n.length,o=new Array(i),u=new Array(i),a=new Array(i);for(e=0;e<i;++e)r=Pe(n[e]),o[e]=r.r||0,u[e]=r.g||0,a[e]=r.b||0;return o=t(o),u=t(u),a=t(a),r.opacity=1,function(t){return r.r=o(t),r.g=u(t),r.b=a(t),r+""}}}function $e(t,n){var e,r=n?n.length:0,i=t?Math.min(r,t.length):0,o=new Array(i),u=new Array(r);for(e=0;e<i;++e)o[e]=Qe(t[e],n[e]);for(;e<r;++e)u[e]=n[e];return function(t){for(e=0;e<i;++e)u[e]=o[e](t);return u}}function Ue(t,n){var e=new Date;return t=+t,n=+n,function(r){return e.setTime(t*(1-r)+n*r),e}}function Ye(t,n){return t=+t,n=+n,function(e){return t*(1-e)+n*e}}function Fe(t,n){var e,r={},i={};for(e in null!==t&&"object"==typeof t||(t={}),null!==n&&"object"==typeof n||(n={}),n)e in t?r[e]=Qe(t[e],n[e]):i[e]=n[e];return function(t){for(e in 
r)i[e]=r[e](t);return i}}Ie((function(t){var n=t.length-1;return function(e){var r=e<=0?e=0:e>=1?(e=1,n-1):Math.floor(e*n),i=t[r],o=t[r+1],u=r>0?t[r-1]:2*i-o,a=r<n-1?t[r+2]:2*o-i;return Re((e-r/n)*n,u,i,o,a)}})),Ie((function(t){var n=t.length;return function(e){var r=Math.floor(((e%=1)<0?++e:e)*n),i=t[(r+n-1)%n],o=t[r%n],u=t[(r+1)%n],a=t[(r+2)%n];return Re((e-r/n)*n,i,o,u,a)}}));var Ze=/[-+]?(?:\d+\.?\d*|\.?\d+)(?:[eE][-+]?\d+)?/g,Ge=new RegExp(Ze.source,"g");function Je(t,n){var e,r,i,o=Ze.lastIndex=Ge.lastIndex=0,u=-1,a=[],l=[];for(t+="",n+="";(e=Ze.exec(t))&&(r=Ge.exec(n));)(i=r.index)>o&&(i=n.slice(o,i),a[u]?a[u]+=i:a[++u]=i),(e=e[0])===(r=r[0])?a[u]?a[u]+=r:a[++u]=r:(a[++u]=null,l.push({i:u,x:Ye(e,r)})),o=Ge.lastIndex;return o<n.length&&(i=n.slice(o),a[u]?a[u]+=i:a[++u]=i),a.length<2?l[0]?function(t){return function(n){return t(n)+""}}(l[0].x):function(t){return function(){return t}}(n):(n=l.length,function(t){for(var e,r=0;r<n;++r)a[(e=l[r]).i]=e.x(t);return a.join("")})}function Ke(t,n){n||(n=[]);var e,r=t?Math.min(n.length,t.length):0,i=n.slice();return function(o){for(e=0;e<r;++e)i[e]=t[e]*(1-o)+n[e]*o;return i}}function Qe(t,n){var e,r,i=typeof n;return null==n||"boolean"===i?Ve(n):("number"===i?Ye:"string"===i?(e=Ee(n))?(n=e,ze):Je:n instanceof Ee?ze:n instanceof Date?Ue:(r=n,!ArrayBuffer.isView(r)||r instanceof DataView?Array.isArray(n)?$e:"function"!=typeof n.valueOf&&"function"!=typeof n.toString||isNaN(n)?Fe:Ye:Ke))(t,n)}function We(t,n){return t=+t,n=+n,function(e){return Math.round(t*(1-e)+n*e)}}function tr(t){return+t}var nr=[0,1];function er(t){return t}function rr(t,n){return(n-=t=+t)?function(e){return(e-t)/n}:(e=isNaN(n)?NaN:.5,function(){return e});var e}function ir(t,n,e){var r=t[0],i=t[1],o=n[0],u=n[1];return i<r?(r=rr(i,r),o=e(u,o)):(r=rr(r,i),o=e(o,u)),function(t){return o(r(t))}}function or(t,n,e){var r=Math.min(t.length,n.length)-1,i=new Array(r),o=new 
Array(r),u=-1;for(t[r]<t[0]&&(t=t.slice().reverse(),n=n.slice().reverse());++u<r;)i[u]=rr(t[u],t[u+1]),o[u]=e(n[u],n[u+1]);return function(n){var e=ae(t,n,1,r)-1;return o[e](i[e](n))}}function ur(t,n){return n.domain(t.domain()).range(t.range()).interpolate(t.interpolate()).clamp(t.clamp()).unknown(t.unknown())}function ar(){return function(){var t,n,e,r,i,o,u=nr,a=nr,l=Qe,s=er;function c(){var t,n,e,l=Math.min(u.length,a.length);return s!==er&&(t=u[0],n=u[l-1],t>n&&(e=t,t=n,n=e),s=function(e){return Math.max(t,Math.min(n,e))}),r=l>2?or:ir,i=o=null,f}function f(n){return null==n||isNaN(n=+n)?e:(i||(i=r(u.map(t),a,l)))(t(s(n)))}return f.invert=function(e){return s(n((o||(o=r(a,u.map(t),Ye)))(e)))},f.domain=function(t){return arguments.length?(u=Array.from(t,tr),c()):u.slice()},f.range=function(t){return arguments.length?(a=Array.from(t),c()):a.slice()},f.rangeRound=function(t){return a=Array.from(t),l=We,c()},f.clamp=function(t){return arguments.length?(s=!!t||er,c()):s!==er},f.interpolate=function(t){return arguments.length?(l=t,c()):l},f.unknown=function(t){return arguments.length?(e=t,f):e},function(e,r){return t=e,n=r,c()}}()(er,er)}function lr(t,n){switch(arguments.length){case 0:break;case 1:this.range(t);break;default:this.range(n).domain(t)}return this}function sr(t){var n=t.domain;return t.ticks=function(t){var e=n();return function(t,n,e){var r,i,o,u,a=-1;if(e=+e,(t=+t)==(n=+n)&&e>0)return[t];if((r=n<t)&&(i=t,t=n,n=i),0===(u=re(t,n,e))||!isFinite(u))return[];if(u>0){let e=Math.round(t/u),r=Math.round(n/u);for(e*u<t&&++e,r*u>n&&--r,o=new Array(i=r-e+1);++a<i;)o[a]=(e+a)*u}else{u=-u;let e=Math.round(t*u),r=Math.round(n*u);for(e/u<t&&++e,r/u>n&&--r,o=new Array(i=r-e+1);++a<i;)o[a]=(e+a)/u}return r&&o.reverse(),o}(e[0],e[e.length-1],null==t?10:t)},t.tickFormat=function(t,e){var r=n();return function(t,n,e,r){var i,o=function(t,n,e){var r=Math.abs(n-t)/Math.max(0,e),i=Math.pow(10,Math.floor(Math.log(r)/Math.LN10)),o=r/i;return 
o>=te?i*=10:o>=ne?i*=5:o>=ee&&(i*=2),n<t?-i:i}(t,n,e);switch((r=Tn(null==r?",f":r)).type){case"s":var u=Math.max(Math.abs(t),Math.abs(n));return null!=r.precision||isNaN(i=function(t,n){return Math.max(0,3*Math.max(-8,Math.min(8,Math.floor(qn(n)/3)))-qn(Math.abs(t)))}(o,u))||(r.precision=i),zn(r,u);case"":case"e":case"g":case"p":case"r":null!=r.precision||isNaN(i=function(t,n){return t=Math.abs(t),n=Math.abs(n)-t,Math.max(0,qn(n)-qn(t))+1}(o,Math.max(Math.abs(t),Math.abs(n))))||(r.precision=i-("e"===r.type));break;case"f":case"%":null!=r.precision||isNaN(i=function(t){return Math.max(0,-qn(Math.abs(t)))}(o))||(r.precision=i-2*("%"===r.type))}return Xn(r)}(r[0],r[r.length-1],null==t?10:t,e)},t.nice=function(e){null==e&&(e=10);var r,i,o=n(),u=0,a=o.length-1,l=o[u],s=o[a],c=10;for(s<l&&(i=l,l=s,s=i,i=u,u=a,a=i);c-- >0;){if((i=re(l,s,e))===r)return o[u]=l,o[a]=s,n(o);if(i>0)l=Math.floor(l/i)*i,s=Math.ceil(s/i)*i;else{if(!(i<0))break;l=Math.ceil(l*i)/i,s=Math.floor(s*i)/i}r=i}return t},t}function cr(){var t=ar();return t.copy=function(){return ur(t,cr())},lr.apply(t,arguments),sr(t)}function fr(t){return((t*=2)<=1?t*t*t:(t-=2)*t*t+2)/2}var hr={value:()=>{}};function pr(){for(var t,n=0,e=arguments.length,r={};n<e;++n){if(!(t=arguments[n]+"")||t in r||/[\s.]/.test(t))throw new Error("illegal type: "+t);r[t]=[]}return new dr(r)}function dr(t){this._=t}function gr(t,n){return t.trim().split(/^|\s+/).map((function(t){var e="",r=t.indexOf(".");if(r>=0&&(e=t.slice(r+1),t=t.slice(0,r)),t&&!n.hasOwnProperty(t))throw new Error("unknown type: "+t);return{type:t,name:e}}))}function vr(t,n){for(var e,r=0,i=t.length;r<i;++r)if((e=t[r]).name===n)return e.value}function yr(t,n,e){for(var r=0,i=t.length;r<i;++r)if(t[r].name===n){t[r]=hr,t=t.slice(0,r).concat(t.slice(r+1));break}return null!=e&&t.push({name:n,value:e}),t}dr.prototype=pr.prototype={constructor:dr,on:function(t,n){var e,r=this._,i=gr(t+"",r),o=-1,u=i.length;if(!(arguments.length<2)){if(null!=n&&"function"!=typeof n)throw 
new Error("invalid callback: "+n);for(;++o<u;)if(e=(t=i[o]).type)r[e]=yr(r[e],t.name,n);else if(null==n)for(e in r)r[e]=yr(r[e],t.name,null);return this}for(;++o<u;)if((e=(t=i[o]).type)&&(e=vr(r[e],t.name)))return e},copy:function(){var t={},n=this._;for(var e in n)t[e]=n[e].slice();return new dr(t)},call:function(t,n){if((e=arguments.length-2)>0)for(var e,r,i=new Array(e),o=0;o<e;++o)i[o]=arguments[o+2];if(!this._.hasOwnProperty(t))throw new Error("unknown type: "+t);for(o=0,e=(r=this._[t]).length;o<e;++o)r[o].value.apply(n,i)},apply:function(t,n,e){if(!this._.hasOwnProperty(t))throw new Error("unknown type: "+t);for(var r=this._[t],i=0,o=r.length;i<o;++i)r[i].value.apply(n,e)}};const mr=pr;var _r,wr,br=0,xr=0,Mr=0,Ar=0,Nr=0,Er=0,kr="object"==typeof performance&&performance.now?performance:Date,Sr="object"==typeof window&&window.requestAnimationFrame?window.requestAnimationFrame.bind(window):function(t){setTimeout(t,17)};function Cr(){return Nr||(Sr(Pr),Nr=kr.now()+Er)}function Pr(){Nr=0}function jr(){this._call=this._time=this._next=null}function qr(t,n,e){var r=new jr;return r.restart(t,n,e),r}function Or(){Nr=(Ar=kr.now())+Er,br=xr=0;try{!function(){Cr(),++br;for(var t,n=_r;n;)(t=Nr-n._time)>=0&&n._call.call(null,t),n=n._next;--br}()}finally{br=0,function(){for(var t,n,e=_r,r=1/0;e;)e._call?(r>e._time&&(r=e._time),t=e,e=e._next):(n=e._next,e._next=null,e=t?t._next=n:_r=n);wr=t,Tr(r)}(),Nr=0}}function Lr(){var t=kr.now(),n=t-Ar;n>1e3&&(Er-=n,Ar=t)}function Tr(t){br||(xr&&(xr=clearTimeout(xr)),t-Nr>24?(t<1/0&&(xr=setTimeout(Or,t-kr.now()-Er)),Mr&&(Mr=clearInterval(Mr))):(Mr||(Ar=kr.now(),Mr=setInterval(Lr,1e3)),br=1,Sr(Or)))}function Br(t,n,e){var r=new jr;return n=null==n?0:+n,r.restart((function(e){r.stop(),t(e+n)}),n,e),r}jr.prototype=qr.prototype={constructor:jr,restart:function(t,n,e){if("function"!=typeof t)throw new TypeError("callback is not a 
function");e=(null==e?Cr():+e)+(null==n?0:+n),this._next||wr===this||(wr?wr._next=this:_r=this,wr=this),this._call=t,this._time=e,Tr()},stop:function(){this._call&&(this._call=null,this._time=1/0,Tr())}};var Dr=mr("start","end","cancel","interrupt"),Hr=[];function Rr(t,n,e,r,i,o){var u=t.__transition;if(u){if(e in u)return}else t.__transition={};!function(t,n,e){var r,i=t.__transition;function o(l){var s,c,f,h;if(1!==e.state)return a();for(s in i)if((h=i[s]).name===e.name){if(3===h.state)return Br(o);4===h.state?(h.state=6,h.timer.stop(),h.on.call("interrupt",t,t.__data__,h.index,h.group),delete i[s]):+s<n&&(h.state=6,h.timer.stop(),h.on.call("cancel",t,t.__data__,h.index,h.group),delete i[s])}if(Br((function(){3===e.state&&(e.state=4,e.timer.restart(u,e.delay,e.time),u(l))})),e.state=2,e.on.call("start",t,t.__data__,e.index,e.group),2===e.state){for(e.state=3,r=new Array(f=e.tween.length),s=0,c=-1;s<f;++s)(h=e.tween[s].value.call(t,t.__data__,e.index,e.group))&&(r[++c]=h);r.length=c+1}}function u(n){for(var i=n<e.duration?e.ease.call(null,n/e.duration):(e.timer.restart(a),e.state=5,1),o=-1,u=r.length;++o<u;)r[o].call(t,i);5===e.state&&(e.on.call("end",t,t.__data__,e.index,e.group),a())}function a(){for(var r in e.state=6,e.timer.stop(),delete i[n],i)return;delete t.__transition}i[n]=e,e.timer=qr((function(t){e.state=1,e.timer.restart(o,e.delay,e.time),e.delay<=t&&o(t-e.delay)}),0,e.time)}(t,e,{name:n,index:r,group:i,on:Dr,tween:Hr,time:o.time,delay:o.delay,duration:o.duration,ease:o.ease,timer:null,state:0})}function Vr(t,n){var e=zr(t,n);if(e.state>0)throw new Error("too late; already scheduled");return e}function Xr(t,n){var e=zr(t,n);if(e.state>3)throw new Error("too late; already running");return e}function zr(t,n){var e=t.__transition;if(!e||!(e=e[n]))throw new Error("transition not found");return e}var Ir,$r,Ur,Yr,Fr=180/Math.PI,Zr={translateX:0,translateY:0,rotate:0,skewX:0,scaleX:1,scaleY:1};function Gr(t,n,e,r,i,o){var 
u,a,l;return(u=Math.sqrt(t*t+n*n))&&(t/=u,n/=u),(l=t*e+n*r)&&(e-=t*l,r-=n*l),(a=Math.sqrt(e*e+r*r))&&(e/=a,r/=a,l/=a),t*r<n*e&&(t=-t,n=-n,l=-l,u=-u),{translateX:i,translateY:o,rotate:Math.atan2(n,t)*Fr,skewX:Math.atan(l)*Fr,scaleX:u,scaleY:a}}function Jr(t,n,e,r){function i(t){return t.length?t.pop()+" ":""}return function(o,u){var a=[],l=[];return o=t(o),u=t(u),function(t,r,i,o,u,a){if(t!==i||r!==o){var l=u.push("translate(",null,n,null,e);a.push({i:l-4,x:Ye(t,i)},{i:l-2,x:Ye(r,o)})}else(i||o)&&u.push("translate("+i+n+o+e)}(o.translateX,o.translateY,u.translateX,u.translateY,a,l),function(t,n,e,o){t!==n?(t-n>180?n+=360:n-t>180&&(t+=360),o.push({i:e.push(i(e)+"rotate(",null,r)-2,x:Ye(t,n)})):n&&e.push(i(e)+"rotate("+n+r)}(o.rotate,u.rotate,a,l),function(t,n,e,o){t!==n?o.push({i:e.push(i(e)+"skewX(",null,r)-2,x:Ye(t,n)}):n&&e.push(i(e)+"skewX("+n+r)}(o.skewX,u.skewX,a,l),function(t,n,e,r,o,u){if(t!==e||n!==r){var a=o.push(i(o)+"scale(",null,",",null,")");u.push({i:a-4,x:Ye(t,e)},{i:a-2,x:Ye(n,r)})}else 1===e&&1===r||o.push(i(o)+"scale("+e+","+r+")")}(o.scaleX,o.scaleY,u.scaleX,u.scaleY,a,l),o=u=null,function(t){for(var n,e=-1,r=l.length;++e<r;)a[(n=l[e]).i]=n.x(t);return a.join("")}}}var Kr=Jr((function(t){return"none"===t?Zr:(Ir||(Ir=document.createElement("DIV"),$r=document.documentElement,Ur=document.defaultView),Ir.style.transform=t,t=Ur.getComputedStyle($r.appendChild(Ir),null).getPropertyValue("transform"),$r.removeChild(Ir),Gr(+(t=t.slice(7,-1).split(","))[0],+t[1],+t[2],+t[3],+t[4],+t[5]))}),"px, ","px)","deg)"),Qr=Jr((function(t){return null==t?Zr:(Yr||(Yr=document.createElementNS("http://www.w3.org/2000/svg","g")),Yr.setAttribute("transform",t),(t=Yr.transform.baseVal.consolidate())?Gr((t=t.matrix).a,t.b,t.c,t.d,t.e,t.f):Zr)}),", ",")",")");function Wr(t,n){var e,r;return function(){var i=Xr(this,t),o=i.tween;if(o!==e)for(var u=0,a=(r=e=o).length;u<a;++u)if(r[u].name===n){(r=r.slice()).splice(u,1);break}i.tween=r}}function ti(t,n,e){var 
r,i;if("function"!=typeof e)throw new Error;return function(){var o=Xr(this,t),u=o.tween;if(u!==r){i=(r=u).slice();for(var a={name:n,value:e},l=0,s=i.length;l<s;++l)if(i[l].name===n){i[l]=a;break}l===s&&i.push(a)}o.tween=i}}function ni(t,n,e){var r=t._id;return t.each((function(){var t=Xr(this,r);(t.value||(t.value={}))[n]=e.apply(this,arguments)})),function(t){return zr(t,r).value[n]}}function ei(t,n){var e;return("number"==typeof n?Ye:n instanceof Ee?ze:(e=Ee(n))?(n=e,ze):Je)(t,n)}function ri(t){return function(){this.removeAttribute(t)}}function ii(t){return function(){this.removeAttributeNS(t.space,t.local)}}function oi(t,n,e){var r,i,o=e+"";return function(){var u=this.getAttribute(t);return u===o?null:u===r?i:i=n(r=u,e)}}function ui(t,n,e){var r,i,o=e+"";return function(){var u=this.getAttributeNS(t.space,t.local);return u===o?null:u===r?i:i=n(r=u,e)}}function ai(t,n,e){var r,i,o;return function(){var u,a,l=e(this);if(null!=l)return(u=this.getAttribute(t))===(a=l+"")?null:u===r&&a===i?o:(i=a,o=n(r=u,l));this.removeAttribute(t)}}function li(t,n,e){var r,i,o;return function(){var u,a,l=e(this);if(null!=l)return(u=this.getAttributeNS(t.space,t.local))===(a=l+"")?null:u===r&&a===i?o:(i=a,o=n(r=u,l));this.removeAttributeNS(t.space,t.local)}}function si(t,n){return function(e){this.setAttribute(t,n.call(this,e))}}function ci(t,n){return function(e){this.setAttributeNS(t.space,t.local,n.call(this,e))}}function fi(t,n){var e,r;function i(){var i=n.apply(this,arguments);return i!==r&&(e=(r=i)&&ci(t,i)),e}return i._value=n,i}function hi(t,n){var e,r;function i(){var i=n.apply(this,arguments);return i!==r&&(e=(r=i)&&si(t,i)),e}return i._value=n,i}function pi(t,n){return function(){Vr(this,t).delay=+n.apply(this,arguments)}}function di(t,n){return n=+n,function(){Vr(this,t).delay=n}}function gi(t,n){return function(){Xr(this,t).duration=+n.apply(this,arguments)}}function vi(t,n){return n=+n,function(){Xr(this,t).duration=n}}function yi(t,n){if("function"!=typeof n)throw 
new Error;return function(){Xr(this,t).ease=n}}function mi(t,n,e){var r,i,o=function(t){return(t+"").trim().split(/^|\s+/).every((function(t){var n=t.indexOf(".");return n>=0&&(t=t.slice(0,n)),!t||"start"===t}))}(n)?Vr:Xr;return function(){var u=o(this,t),a=u.on;a!==r&&(i=(r=a).copy()).on(n,e),u.on=i}}var _i=Cn.prototype.constructor;function wi(t){return function(){this.style.removeProperty(t)}}function bi(t,n,e){return function(r){this.style.setProperty(t,n.call(this,r),e)}}function xi(t,n,e){var r,i;function o(){var o=n.apply(this,arguments);return o!==i&&(r=(i=o)&&bi(t,o,e)),r}return o._value=n,o}function Mi(t){return function(n){this.textContent=t.call(this,n)}}function Ai(t){var n,e;function r(){var r=t.apply(this,arguments);return r!==e&&(n=(e=r)&&Mi(r)),n}return r._value=t,r}var Ni=0;function Ei(t,n,e,r){this._groups=t,this._parents=n,this._name=e,this._id=r}function ki(){return++Ni}var Si=Cn.prototype;Ei.prototype=function(t){return Cn().transition(t)}.prototype={constructor:Ei,select:function(t){var n=this._name,e=this._id;"function"!=typeof t&&(t=gt(t));for(var r=this._groups,i=r.length,o=new Array(i),u=0;u<i;++u)for(var a,l,s=r[u],c=s.length,f=o[u]=new Array(c),h=0;h<c;++h)(a=s[h])&&(l=t.call(a,a.__data__,h,s))&&("__data__"in a&&(l.__data__=a.__data__),f[h]=l,Rr(f[h],n,e,h,f,zr(a,e)));return new Ei(o,this._parents,n,e)},selectAll:function(t){var n=this._name,e=this._id;"function"!=typeof t&&(t=mt(t));for(var r=this._groups,i=r.length,o=[],u=[],a=0;a<i;++a)for(var l,s=r[a],c=s.length,f=0;f<c;++f)if(l=s[f]){for(var h,p=t.call(l,l.__data__,f,s),d=zr(l,e),g=0,v=p.length;g<v;++g)(h=p[g])&&Rr(h,n,e,g,p,d);o.push(p),u.push(l)}return new Ei(o,u,n,e)},selectChild:Si.selectChild,selectChildren:Si.selectChildren,filter:function(t){"function"!=typeof t&&(t=_t(t));for(var n=this._groups,e=n.length,r=new Array(e),i=0;i<e;++i)for(var o,u=n[i],a=u.length,l=r[i]=[],s=0;s<a;++s)(o=u[s])&&t.call(o,o.__data__,s,u)&&l.push(o);return new 
Ei(r,this._parents,this._name,this._id)},merge:function(t){if(t._id!==this._id)throw new Error;for(var n=this._groups,e=t._groups,r=n.length,i=e.length,o=Math.min(r,i),u=new Array(r),a=0;a<o;++a)for(var l,s=n[a],c=e[a],f=s.length,h=u[a]=new Array(f),p=0;p<f;++p)(l=s[p]||c[p])&&(h[p]=l);for(;a<r;++a)u[a]=n[a];return new Ei(u,this._parents,this._name,this._id)},selection:function(){return new _i(this._groups,this._parents)},transition:function(){for(var t=this._name,n=this._id,e=ki(),r=this._groups,i=r.length,o=0;o<i;++o)for(var u,a=r[o],l=a.length,s=0;s<l;++s)if(u=a[s]){var c=zr(u,n);Rr(u,t,e,s,a,{time:c.time+c.delay+c.duration,delay:0,duration:c.duration,ease:c.ease})}return new Ei(r,this._parents,t,e)},call:Si.call,nodes:Si.nodes,node:Si.node,size:Si.size,empty:Si.empty,each:Si.each,on:function(t,n){var e=this._id;return arguments.length<2?zr(this.node(),e).on.on(t):this.each(mi(e,t,n))},attr:function(t,n){var e=Tt(t),r="transform"===e?Qr:ei;return this.attrTween(t,"function"==typeof n?(e.local?li:ai)(e,r,ni(this,"attr."+t,n)):null==n?(e.local?ii:ri)(e):(e.local?ui:oi)(e,r,n))},attrTween:function(t,n){var e="attr."+t;if(arguments.length<2)return(e=this.tween(e))&&e._value;if(null==n)return this.tween(e,null);if("function"!=typeof n)throw new Error;var r=Tt(t);return this.tween(e,(r.local?fi:hi)(r,n))},style:function(t,n,e){var r="transform"==(t+="")?Kr:ei;return null==n?this.styleTween(t,function(t,n){var e,r,i;return function(){var o=Yt(this,t),u=(this.style.removeProperty(t),Yt(this,t));return o===u?null:o===e&&u===r?i:i=n(e=o,r=u)}}(t,r)).on("end.style."+t,wi(t)):"function"==typeof n?this.styleTween(t,function(t,n,e){var r,i,o;return function(){var u=Yt(this,t),a=e(this),l=a+"";return null==a&&(this.style.removeProperty(t),l=a=Yt(this,t)),u===l?null:u===r&&l===i?o:(i=l,o=n(r=u,a))}}(t,r,ni(this,"style."+t,n))).each(function(t,n){var e,r,i,o,u="style."+n,a="end."+u;return function(){var l=Xr(this,t),s=l.on,c=null==l.value[u]?o||(o=wi(n)):void 
0;s===e&&i===c||(r=(e=s).copy()).on(a,i=c),l.on=r}}(this._id,t)):this.styleTween(t,function(t,n,e){var r,i,o=e+"";return function(){var u=Yt(this,t);return u===o?null:u===r?i:i=n(r=u,e)}}(t,r,n),e).on("end.style."+t,null)},styleTween:function(t,n,e){var r="style."+(t+="");if(arguments.length<2)return(r=this.tween(r))&&r._value;if(null==n)return this.tween(r,null);if("function"!=typeof n)throw new Error;return this.tween(r,xi(t,n,null==e?"":e))},text:function(t){return this.tween("text","function"==typeof t?function(t){return function(){var n=t(this);this.textContent=null==n?"":n}}(ni(this,"text",t)):function(t){return function(){this.textContent=t}}(null==t?"":t+""))},textTween:function(t){var n="text";if(arguments.length<1)return(n=this.tween(n))&&n._value;if(null==t)return this.tween(n,null);if("function"!=typeof t)throw new Error;return this.tween(n,Ai(t))},remove:function(){return this.on("end.remove",function(t){return function(){var n=this.parentNode;for(var e in this.__transition)if(+e!==t)return;n&&n.removeChild(this)}}(this._id))},tween:function(t,n){var e=this._id;if(t+="",arguments.length<2){for(var r,i=zr(this.node(),e).tween,o=0,u=i.length;o<u;++o)if((r=i[o]).name===t)return r.value;return null}return this.each((null==n?Wr:ti)(e,t,n))},delay:function(t){var n=this._id;return arguments.length?this.each(("function"==typeof t?pi:di)(n,t)):zr(this.node(),n).delay},duration:function(t){var n=this._id;return arguments.length?this.each(("function"==typeof t?gi:vi)(n,t)):zr(this.node(),n).duration},ease:function(t){var n=this._id;return arguments.length?this.each(yi(n,t)):zr(this.node(),n).ease},easeVarying:function(t){if("function"!=typeof t)throw new Error;return this.each(function(t,n){return function(){var e=n.apply(this,arguments);if("function"!=typeof e)throw new Error;Xr(this,t).ease=e}}(this._id,t))},end:function(){var t,n,e=this,r=e._id,i=e.size();return new Promise((function(o,u){var 
a={value:u},l={value:function(){0==--i&&o()}};e.each((function(){var e=Xr(this,r),i=e.on;i!==t&&((n=(t=i).copy())._.cancel.push(a),n._.interrupt.push(a),n._.end.push(l)),e.on=n})),0===i&&o()}))},[Symbol.iterator]:Si[Symbol.iterator]};var Ci={time:null,delay:0,duration:250,ease:fr};function Pi(t,n){for(var e;!(e=t.__transition)||!(e=e[n]);)if(!(t=t.parentNode))throw new Error(` + "`" + `transition ${n} not found` + "`" + `);return e}function ji(){let t=960,n=null,e=18,r=null,i=null,o="",u=750,a=fr,l=!1,s=!1,c=null,f=null,h=0,p=null,d=null,g=!1,v=!1,y=!1,m=null,_=!1,w=null,b=function(t){return t.data.n||t.data.name},x=function(t){return"v"in t?t.v:t.value},M=function(t){return t.c||t.children},A=function(t){return t.data.l||t.data.libtype},N=function(t){return"d"in t.data?t.data.d:t.data.delta},E=function(t,n,e){d=()=>{p&&(p.textContent="search: "+n+" of "+e+" total samples ( "+Xn(".3f")(n/e*100,3)+"%)")},d()};const k=E;let S=(t,n,e=!1)=>{if(!n)return!1;let r=b(t);e&&(n=n.toLowerCase(),r=r.toLowerCase());const i=new RegExp(n);return void 0!==r&&r&&r.match(i)};const C=S;let P=function(t){p&&(t?p.textContent=t:"function"==typeof d?d():p.textContent="")};const j=P;let q=function(t){return b(t)+" ("+Xn(".3f")(100*(t.x1-t.x0),3)+"%, "+x(t)+" samples)"},O=function(t){return t.highlight?"#E600E6":function(t,n){let e=w||"warm";w||void 0===n||""===n||(e="red",void 0!==t&&t&&t.match(/::/)&&(e="yellow"),"kernel"===n?e="orange":"jit"===n?e="green":"inlined"===n&&(e="aqua"));const r=function(t){let n=0;if(t){const e=t.split("` + "`" + `");e.length>1&&(t=e[e.length-1]),n=function(t){let n=0,e=0,r=1;if(t){for(let i=0;i<t.length&&!(i>6);i++)n+=r*(t.charCodeAt(i)%10),e+=9*r,r*=.7;e>0&&(n/=e)}return n}(t=t.split("(")[0])}return n}(t);return function(t,n){let 
e,r,i;return"red"===t?(e=200+Math.round(55*n),r=50+Math.round(80*n),i=r):"orange"===t?(e=190+Math.round(65*n),r=90+Math.round(65*n),i=0):"yellow"===t?(e=175+Math.round(55*n),r=e,i=50+Math.round(20*n)):"green"===t?(e=50+Math.round(60*n),r=200+Math.round(55*n),i=e):"pastelgreen"===t?(e=163+Math.round(75*n),r=195+Math.round(49*n),i=72+Math.round(149*n)):"blue"===t?(e=91+Math.round(126*n),r=156+Math.round(76*n),i=221+Math.round(26*n)):"aqua"===t?(e=50+Math.round(60*n),r=165+Math.round(55*n),i=r):"cold"===t?(e=0+Math.round(55*(1-n)),r=0+Math.round(230*(1-n)),i=200+Math.round(55*n)):(e=200+Math.round(55*n),r=0+Math.round(230*(1-n)),i=0+Math.round(55*(1-n))),"rgb("+e+","+r+","+i+")"}(e,r)}(b(t),A(t))};const L=O;function T(t){t.data.fade=!1,t.data.hide=!1,t.children&&t.children.forEach(T)}function B(t){t.parent&&(t.parent.data.fade=!0,B(t.parent))}function D(t){if(i&&i.hide(),function(t){let n,e,r,i=t,o=i.parent;for(;o;){for(n=o.children,e=n.length;e--;)r=n[e],r!==i&&(r.data.hide=!0);i=o,o=i.parent}}(t),T(t),B(t),z(),y){const n=Pn(this).select("svg")._groups[0][0].parentNode.offsetTop,r=(window.innerHeight-n)/e,i=(t.height-r+10)*e;window.scrollTo({top:n+i,left:0,behavior:"smooth"})}"function"==typeof c&&c(t)}function H(t,n){if(t.id===n)return t;{const e=M(t);if(e)for(let t=0;t<e.length;t++){const r=H(e[t],n);if(r)return r}}}function R(t){t.highlight=!1,M(t)&&M(t).forEach((function(t){R(t)}))}function V(t,n){return"function"==typeof l?l(t,n):l?Un(b(t),b(n)):void 0}const X=function(){var t=1,n=1,e=0,r=!1;function i(i){var o=i.height+1;return i.x0=i.y0=e,i.x1=t,i.y1=n/o,i.eachBefore(function(t,n){return function(r){r.children&&function(t,n,e,r,i){for(var o,u=t.children,a=-1,l=u.length,s=t.value&&(r-n)/t.value;++a<l;)(o=u[a]).y0=e,o.y1=i,o.x0=n,o.x1=n+=o.value*s}(r,r.x0,t*(r.depth+1)/n,r.x1,t*(r.depth+2)/n);var i=r.x0,o=r.y0,u=r.x1-e,a=r.y1-e;u<i&&(i=u=(i+u)/2),a<o&&(o=a=(o+a)/2),r.x0=i,r.y0=o,r.x1=u,r.y1=a}}(n,o)),r&&i.eachBefore(Yn),i}return i.round=function(t){return 
arguments.length?(r=!!t,i):r},i.size=function(e){return arguments.length?(t=+e[0],n=+e[1],i):[t,n]},i.padding=function(t){return arguments.length?(e=+t,i):e},i}();function z(){r.each((function(r){const o=cr().range([0,t]),c=cr().range([0,e]);$(r),l&&r.sort(V),X(r);const p=t/(r.x1-r.x0);function d(t){return(t.x1-t.x0)*p}const g=function(n){let e=n.descendants();if(h>0){const r=t/(n.x1-n.x0);e=e.filter((function(t){return(t.x1-t.x0)*r>h}))}return e}(r),y=Pn(this).select("svg");y.attr("width",t);let _=y.selectAll("g").data(g,(function(t){return t.id}));if(!n||v){const t=Math.max.apply(null,g.map((function(t){return t.depth})));n=(t+3)*e,n<m&&(n=m),y.attr("height",n)}_.transition().duration(u).ease(a).attr("transform",(function(t){return"translate("+o(t.x0)+","+(s?c(t.depth):n-c(t.depth)-e)+")"})),_.select("rect").transition().duration(u).ease(a).attr("width",d);const w=_.enter().append("svg:g").attr("transform",(function(t){return"translate("+o(t.x0)+","+(s?c(t.depth):n-c(t.depth)-e)+")"}));w.append("svg:rect").transition().delay(u/2).attr("width",d),i||w.append("svg:title"),w.append("foreignObject").append("xhtml:div"),_=y.selectAll("g").data(g,(function(t){return t.id})),_.attr("width",d).attr("height",(function(t){return e})).attr("name",(function(t){return b(t)})).attr("class",(function(t){return t.data.fade?"frame fade":"frame"})),_.select("rect").attr("height",(function(t){return e})).attr("fill",(function(t){return O(t)})),i||_.select("title").text(q),_.select("foreignObject").attr("width",d).attr("height",(function(t){return e})).select("div").attr("class","d3-flame-graph-label").style("display",(function(t){return d(t)<35?"none":"block"})).transition().delay(u).text(b),_.on("click",((t,n)=>{D(n)})),_.exit().remove(),_.on("mouseover",(function(t,n){i&&i.show(n,this),P(q(n)),"function"==typeof f&&f(n)})).on("mouseout",(function(){i&&i.hide(),P(null)}))}))}function I(t,n){n.forEach((function(n){const e=t.find((function(t){return 
t.name===n.name}));e?(e.value+=n.value,n.children&&(e.children||(e.children=[]),I(e.children,n.children))):t.push(n)}))}function $(t){let n,e,r,i,o,u,a,l;const s=[],c=[],f=[],h=!g;let p=t.data;for(p.hide?(t.value=0,e=t.children,e&&f.push(e)):(t.value=p.fade?0:x(p),s.push(t));n=s.pop();)if(e=n.children,e&&(o=e.length)){for(i=0;o--;)a=e[o],p=a.data,p.hide?(a.value=0,r=a.children,r&&f.push(r)):(p.fade?a.value=0:(l=x(p),a.value=l,i+=l),s.push(a));h&&n.value&&(n.value-=i),c.push(e)}for(o=c.length;o--;){for(e=c[o],i=0,u=e.length;u--;)i+=e[u].value;e[0].parent.value+=i}for(;f.length;)for(e=f.pop(),u=e.length;u--;)a=e[u],a.value=0,r=a.children,r&&f.push(r)}function U(){r.datum((t=>{if("Node"!==t.constructor.name){const n=Zn(t,M);return function(t){let n=0;!function(t,n){n(t);let e=t.children;if(e){const t=[e];let r,i,o;for(;t.length;)for(e=t.pop(),r=e.length;r--;)i=e[r],n(i),o=i.children,o&&t.push(o)}}(t,(function(t){t.id=n++}))}(n),$(n),n.originalValue=n.value,_&&n.eachAfter((t=>{let n=N(t);const e=t.children;let r=e&&e.length;for(;--r>=0;)n+=e[r].delta;t.delta=n})),n}}))}function Y(e){if(!arguments.length)return Y;r=e,U(),r.each((function(e){if(0===Pn(this).select("svg").size()){const e=Pn(this).append("svg:svg").attr("width",t).attr("class","partition d3-flame-graph");n&&(n<m&&(n=m),e.attr("height",n)),e.append("svg:text").attr("class","title").attr("text-anchor","middle").attr("y","25").attr("x",t/2).attr("fill","#808080").text(o),i&&e.call(i)}})),z()}return Y.height=function(t){return arguments.length?(n=t,Y):n},Y.minHeight=function(t){return arguments.length?(m=t,Y):m},Y.width=function(n){return arguments.length?(t=n,Y):t},Y.cellHeight=function(t){return arguments.length?(e=t,Y):e},Y.tooltip=function(t){return arguments.length?("function"==typeof t&&(i=t),Y):i},Y.title=function(t){return arguments.length?(o=t,Y):o},Y.transitionDuration=function(t){return arguments.length?(u=t,Y):u},Y.transitionEase=function(t){return 
arguments.length?(a=t,Y):a},Y.sort=function(t){return arguments.length?(l=t,Y):l},Y.inverted=function(t){return arguments.length?(s=t,Y):s},Y.computeDelta=function(t){return arguments.length?(_=t,Y):_},Y.setLabelHandler=function(t){return arguments.length?(q=t,Y):q},Y.label=Y.setLabelHandler,Y.search=function(t){const n=[];let e=0,i=0;r.each((function(r){const o=function(t,n){const e=[];let r=0;return function t(i,o){let u=!1;S(i,n)?(i.highlight=!0,u=!0,o||(r+=x(i)),e.push(i)):i.highlight=!1,M(i)&&M(i).forEach((function(n){t(n,o||u)}))}(t,!1),[e,r]}(r,t);n.push(...o[0]),e+=o[1],i+=r.originalValue})),E(n,e,i),z()},Y.findById=function(t){if(null==t)return null;let n=null;return r.each((function(e){null===n&&(n=H(e,t))})),n},Y.clear=function(){P(null),r.each((function(t){R(t),z()}))},Y.zoomTo=function(t){D(t)},Y.resetZoom=function(){r.each((function(t){D(t)}))},Y.onClick=function(t){return arguments.length?(c=t,Y):c},Y.onHover=function(t){return arguments.length?(f=t,Y):f},Y.merge=function(t){return r?(this.resetZoom(),d=null,P(null),r.datum((n=>(I([n.data],[t]),n.data))),U(),z(),Y):Y},Y.update=function(t){return r?(t&&(r.datum(t),U()),z(),Y):Y},Y.destroy=function(){return r?(i&&(i.hide(),"function"==typeof i.destroy&&i.destroy()),r.selectAll("svg").remove(),Y):Y},Y.setColorMapper=function(t){return arguments.length?(O=n=>{const e=L(n);return t(n,e)},Y):(O=L,Y)},Y.color=Y.setColorMapper,Y.setColorHue=function(t){return arguments.length?(w=t,Y):(w=null,Y)},Y.minFrameSize=function(t){return arguments.length?(h=t,Y):h},Y.setDetailsElement=function(t){return arguments.length?(p=t,Y):p},Y.details=Y.setDetailsElement,Y.selfValue=function(t){return arguments.length?(g=t,Y):g},Y.resetHeightOnZoom=function(t){return arguments.length?(v=t,Y):v},Y.scrollOnZoom=function(t){return arguments.length?(y=t,Y):y},Y.getName=function(t){return arguments.length?(b=t,Y):b},Y.getValue=function(t){return arguments.length?(x=t,Y):x},Y.getChildren=function(t){return 
arguments.length?(M=t,Y):M},Y.getLibtype=function(t){return arguments.length?(A=t,Y):A},Y.getDelta=function(t){return arguments.length?(N=t,Y):N},Y.setSearchHandler=function(t){return arguments.length?(E=t,Y):(E=k,Y)},Y.setDetailsHandler=function(t){return arguments.length?(P=t,Y):(P=j,Y)},Y.setSearchMatch=function(t){return arguments.length?(S=t,Y):(S=C,Y)},Y}return Cn.prototype.interrupt=function(t){return this.each((function(){!function(t,n){var e,r,i,o=t.__transition,u=!0;if(o){for(i in n=null==n?null:n+"",o)(e=o[i]).name===n?(r=e.state>2&&e.state<5,e.state=6,e.timer.stop(),e.on.call(r?"interrupt":"cancel",t,t.__data__,e.index,e.group),delete o[i]):u=!1;u&&delete t.__transition}}(this,t)}))},Cn.prototype.transition=function(t){var n,e;t instanceof Ei?(n=t._id,t=t._name):(n=ki(),(e=Ci).time=Cr(),t=null==t?null:t+"");for(var r=this._groups,i=r.length,o=0;o<i;++o)for(var u,a=r[o],l=a.length,s=0;s<l;++s)(u=a[s])&&Rr(u,t,n,s,a,e||Pi(u,n));return new Ei(r,this._parents,t,n)},n})()}));
+`
+
+// CSSSource returns the d3-flamegraph.css file
+const CSSSource = `
+.d3-flame-graph rect {
+ stroke: #EEEEEE;
+ fill-opacity: .8;
+}
+
+.d3-flame-graph rect:hover {
+ stroke: #474747;
+ stroke-width: 0.5;
+ cursor: pointer;
+}
+
+.d3-flame-graph-label {
+ pointer-events: none;
+ white-space: nowrap;
+ text-overflow: ellipsis;
+ overflow: hidden;
+ font-size: 12px;
+ font-family: Verdana;
+ margin-left: 4px;
+ margin-right: 4px;
+ line-height: 1.5;
+ padding: 0 0 0;
+ font-weight: 400;
+ color: black;
+ text-align: left;
+}
+
+.d3-flame-graph .fade {
+ opacity: 0.6 !important;
+}
+
+.d3-flame-graph .title {
+ font-size: 20px;
+ font-family: Verdana;
+}
+
+.d3-flame-graph-tip {
+ background-color: black;
+ border: none;
+ border-radius: 3px;
+ padding: 5px 10px 5px 10px;
+ min-width: 250px;
+ text-align: left;
+ color: white;
+ z-index: 10;
+}
+`
diff --git a/src/cmd/vendor/github.com/google/pprof/third_party/d3flamegraph/index.js b/src/cmd/vendor/github.com/google/pprof/third_party/d3flamegraph/index.js
new file mode 100644
index 0000000..d6e0b5b
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/third_party/d3flamegraph/index.js
@@ -0,0 +1,13 @@
+// This file exports a stripped-down API surface of d3 and d3-flame-graph,
+// using only the functions used by pprof.
+
+export {
+ select,
+} from "d3-selection";
+
+export {
+ default as flamegraph
+// If we export from "d3-flame-graph" that exports the "dist" version which
+// includes another copy of d3-selection. To avoid including d3-selection
+// twice in the output, instead import the "src" version.
+} from "d3-flame-graph/src/flamegraph";
diff --git a/src/cmd/vendor/github.com/google/pprof/third_party/d3flamegraph/package-lock.json b/src/cmd/vendor/github.com/google/pprof/third_party/d3flamegraph/package-lock.json
new file mode 100644
index 0000000..d5caba6
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/third_party/d3flamegraph/package-lock.json
@@ -0,0 +1,1106 @@
+{
+ "name": "d3-pprof",
+ "version": "2.0.0",
+ "lockfileVersion": 1,
+ "requires": true,
+ "dependencies": {
+ "@discoveryjs/json-ext": {
+ "version": "0.5.6",
+ "resolved": "https://registry.npmjs.org/@discoveryjs/json-ext/-/json-ext-0.5.6.tgz",
+ "integrity": "sha512-ws57AidsDvREKrZKYffXddNkyaF14iHNHm8VQnZH6t99E8gczjNN0GpvcGny0imC80yQ0tHz1xVUKk/KFQSUyA==",
+ "dev": true
+ },
+ "@types/eslint": {
+ "version": "8.2.1",
+ "resolved": "https://registry.npmjs.org/@types/eslint/-/eslint-8.2.1.tgz",
+ "integrity": "sha512-UP9rzNn/XyGwb5RQ2fok+DzcIRIYwc16qTXse5+Smsy8MOIccCChT15KAwnsgQx4PzJkaMq4myFyZ4CL5TjhIQ==",
+ "dev": true,
+ "requires": {
+ "@types/estree": "*",
+ "@types/json-schema": "*"
+ }
+ },
+ "@types/eslint-scope": {
+ "version": "3.7.1",
+ "resolved": "https://registry.npmjs.org/@types/eslint-scope/-/eslint-scope-3.7.1.tgz",
+ "integrity": "sha512-SCFeogqiptms4Fg29WpOTk5nHIzfpKCemSN63ksBQYKTcXoJEmJagV+DhVmbapZzY4/5YaOV1nZwrsU79fFm1g==",
+ "dev": true,
+ "requires": {
+ "@types/eslint": "*",
+ "@types/estree": "*"
+ }
+ },
+ "@types/estree": {
+ "version": "0.0.50",
+ "resolved": "https://registry.npmjs.org/@types/estree/-/estree-0.0.50.tgz",
+ "integrity": "sha512-C6N5s2ZFtuZRj54k2/zyRhNDjJwwcViAM3Nbm8zjBpbqAdZ00mr0CFxvSKeO8Y/e03WVFLpQMdHYVfUd6SB+Hw==",
+ "dev": true
+ },
+ "@types/json-schema": {
+ "version": "7.0.9",
+ "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.9.tgz",
+ "integrity": "sha512-qcUXuemtEu+E5wZSJHNxUXeCZhAfXKQ41D+duX+VYPde7xyEVZci+/oXKJL13tnRs9lR2pr4fod59GT6/X1/yQ==",
+ "dev": true
+ },
+ "@types/node": {
+ "version": "16.11.11",
+ "resolved": "https://registry.npmjs.org/@types/node/-/node-16.11.11.tgz",
+ "integrity": "sha512-KB0sixD67CeecHC33MYn+eYARkqTheIRNuu97y2XMjR7Wu3XibO1vaY6VBV6O/a89SPI81cEUIYT87UqUWlZNw==",
+ "dev": true
+ },
+ "@webassemblyjs/ast": {
+ "version": "1.11.1",
+ "resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.11.1.tgz",
+ "integrity": "sha512-ukBh14qFLjxTQNTXocdyksN5QdM28S1CxHt2rdskFyL+xFV7VremuBLVbmCePj+URalXBENx/9Lm7lnhihtCSw==",
+ "dev": true,
+ "requires": {
+ "@webassemblyjs/helper-numbers": "1.11.1",
+ "@webassemblyjs/helper-wasm-bytecode": "1.11.1"
+ }
+ },
+ "@webassemblyjs/floating-point-hex-parser": {
+ "version": "1.11.1",
+ "resolved": "https://registry.npmjs.org/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.11.1.tgz",
+ "integrity": "sha512-iGRfyc5Bq+NnNuX8b5hwBrRjzf0ocrJPI6GWFodBFzmFnyvrQ83SHKhmilCU/8Jv67i4GJZBMhEzltxzcNagtQ==",
+ "dev": true
+ },
+ "@webassemblyjs/helper-api-error": {
+ "version": "1.11.1",
+ "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.1.tgz",
+ "integrity": "sha512-RlhS8CBCXfRUR/cwo2ho9bkheSXG0+NwooXcc3PAILALf2QLdFyj7KGsKRbVc95hZnhnERon4kW/D3SZpp6Tcg==",
+ "dev": true
+ },
+ "@webassemblyjs/helper-buffer": {
+ "version": "1.11.1",
+ "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-buffer/-/helper-buffer-1.11.1.tgz",
+ "integrity": "sha512-gwikF65aDNeeXa8JxXa2BAk+REjSyhrNC9ZwdT0f8jc4dQQeDQ7G4m0f2QCLPJiMTTO6wfDmRmj/pW0PsUvIcA==",
+ "dev": true
+ },
+ "@webassemblyjs/helper-numbers": {
+ "version": "1.11.1",
+ "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-numbers/-/helper-numbers-1.11.1.tgz",
+ "integrity": "sha512-vDkbxiB8zfnPdNK9Rajcey5C0w+QJugEglN0of+kmO8l7lDb77AnlKYQF7aarZuCrv+l0UvqL+68gSDr3k9LPQ==",
+ "dev": true,
+ "requires": {
+ "@webassemblyjs/floating-point-hex-parser": "1.11.1",
+ "@webassemblyjs/helper-api-error": "1.11.1",
+ "@xtuc/long": "4.2.2"
+ }
+ },
+ "@webassemblyjs/helper-wasm-bytecode": {
+ "version": "1.11.1",
+ "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.1.tgz",
+ "integrity": "sha512-PvpoOGiJwXeTrSf/qfudJhwlvDQxFgelbMqtq52WWiXC6Xgg1IREdngmPN3bs4RoO83PnL/nFrxucXj1+BX62Q==",
+ "dev": true
+ },
+ "@webassemblyjs/helper-wasm-section": {
+ "version": "1.11.1",
+ "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.11.1.tgz",
+ "integrity": "sha512-10P9No29rYX1j7F3EVPX3JvGPQPae+AomuSTPiF9eBQeChHI6iqjMIwR9JmOJXwpnn/oVGDk7I5IlskuMwU/pg==",
+ "dev": true,
+ "requires": {
+ "@webassemblyjs/ast": "1.11.1",
+ "@webassemblyjs/helper-buffer": "1.11.1",
+ "@webassemblyjs/helper-wasm-bytecode": "1.11.1",
+ "@webassemblyjs/wasm-gen": "1.11.1"
+ }
+ },
+ "@webassemblyjs/ieee754": {
+ "version": "1.11.1",
+ "resolved": "https://registry.npmjs.org/@webassemblyjs/ieee754/-/ieee754-1.11.1.tgz",
+ "integrity": "sha512-hJ87QIPtAMKbFq6CGTkZYJivEwZDbQUgYd3qKSadTNOhVY7p+gfP6Sr0lLRVTaG1JjFj+r3YchoqRYxNH3M0GQ==",
+ "dev": true,
+ "requires": {
+ "@xtuc/ieee754": "^1.2.0"
+ }
+ },
+ "@webassemblyjs/leb128": {
+ "version": "1.11.1",
+ "resolved": "https://registry.npmjs.org/@webassemblyjs/leb128/-/leb128-1.11.1.tgz",
+ "integrity": "sha512-BJ2P0hNZ0u+Th1YZXJpzW6miwqQUGcIHT1G/sf72gLVD9DZ5AdYTqPNbHZh6K1M5VmKvFXwGSWZADz+qBWxeRw==",
+ "dev": true,
+ "requires": {
+ "@xtuc/long": "4.2.2"
+ }
+ },
+ "@webassemblyjs/utf8": {
+ "version": "1.11.1",
+ "resolved": "https://registry.npmjs.org/@webassemblyjs/utf8/-/utf8-1.11.1.tgz",
+ "integrity": "sha512-9kqcxAEdMhiwQkHpkNiorZzqpGrodQQ2IGrHHxCy+Ozng0ofyMA0lTqiLkVs1uzTRejX+/O0EOT7KxqVPuXosQ==",
+ "dev": true
+ },
+ "@webassemblyjs/wasm-edit": {
+ "version": "1.11.1",
+ "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-edit/-/wasm-edit-1.11.1.tgz",
+ "integrity": "sha512-g+RsupUC1aTHfR8CDgnsVRVZFJqdkFHpsHMfJuWQzWU3tvnLC07UqHICfP+4XyL2tnr1amvl1Sdp06TnYCmVkA==",
+ "dev": true,
+ "requires": {
+ "@webassemblyjs/ast": "1.11.1",
+ "@webassemblyjs/helper-buffer": "1.11.1",
+ "@webassemblyjs/helper-wasm-bytecode": "1.11.1",
+ "@webassemblyjs/helper-wasm-section": "1.11.1",
+ "@webassemblyjs/wasm-gen": "1.11.1",
+ "@webassemblyjs/wasm-opt": "1.11.1",
+ "@webassemblyjs/wasm-parser": "1.11.1",
+ "@webassemblyjs/wast-printer": "1.11.1"
+ }
+ },
+ "@webassemblyjs/wasm-gen": {
+ "version": "1.11.1",
+ "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-gen/-/wasm-gen-1.11.1.tgz",
+ "integrity": "sha512-F7QqKXwwNlMmsulj6+O7r4mmtAlCWfO/0HdgOxSklZfQcDu0TpLiD1mRt/zF25Bk59FIjEuGAIyn5ei4yMfLhA==",
+ "dev": true,
+ "requires": {
+ "@webassemblyjs/ast": "1.11.1",
+ "@webassemblyjs/helper-wasm-bytecode": "1.11.1",
+ "@webassemblyjs/ieee754": "1.11.1",
+ "@webassemblyjs/leb128": "1.11.1",
+ "@webassemblyjs/utf8": "1.11.1"
+ }
+ },
+ "@webassemblyjs/wasm-opt": {
+ "version": "1.11.1",
+ "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-opt/-/wasm-opt-1.11.1.tgz",
+ "integrity": "sha512-VqnkNqnZlU5EB64pp1l7hdm3hmQw7Vgqa0KF/KCNO9sIpI6Fk6brDEiX+iCOYrvMuBWDws0NkTOxYEb85XQHHw==",
+ "dev": true,
+ "requires": {
+ "@webassemblyjs/ast": "1.11.1",
+ "@webassemblyjs/helper-buffer": "1.11.1",
+ "@webassemblyjs/wasm-gen": "1.11.1",
+ "@webassemblyjs/wasm-parser": "1.11.1"
+ }
+ },
+ "@webassemblyjs/wasm-parser": {
+ "version": "1.11.1",
+ "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-parser/-/wasm-parser-1.11.1.tgz",
+ "integrity": "sha512-rrBujw+dJu32gYB7/Lup6UhdkPx9S9SnobZzRVL7VcBH9Bt9bCBLEuX/YXOOtBsOZ4NQrRykKhffRWHvigQvOA==",
+ "dev": true,
+ "requires": {
+ "@webassemblyjs/ast": "1.11.1",
+ "@webassemblyjs/helper-api-error": "1.11.1",
+ "@webassemblyjs/helper-wasm-bytecode": "1.11.1",
+ "@webassemblyjs/ieee754": "1.11.1",
+ "@webassemblyjs/leb128": "1.11.1",
+ "@webassemblyjs/utf8": "1.11.1"
+ }
+ },
+ "@webassemblyjs/wast-printer": {
+ "version": "1.11.1",
+ "resolved": "https://registry.npmjs.org/@webassemblyjs/wast-printer/-/wast-printer-1.11.1.tgz",
+ "integrity": "sha512-IQboUWM4eKzWW+N/jij2sRatKMh99QEelo3Eb2q0qXkvPRISAj8Qxtmw5itwqK+TTkBuUIE45AxYPToqPtL5gg==",
+ "dev": true,
+ "requires": {
+ "@webassemblyjs/ast": "1.11.1",
+ "@xtuc/long": "4.2.2"
+ }
+ },
+ "@webpack-cli/configtest": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/@webpack-cli/configtest/-/configtest-1.1.0.tgz",
+ "integrity": "sha512-ttOkEkoalEHa7RaFYpM0ErK1xc4twg3Am9hfHhL7MVqlHebnkYd2wuI/ZqTDj0cVzZho6PdinY0phFZV3O0Mzg==",
+ "dev": true
+ },
+ "@webpack-cli/info": {
+ "version": "1.4.0",
+ "resolved": "https://registry.npmjs.org/@webpack-cli/info/-/info-1.4.0.tgz",
+ "integrity": "sha512-F6b+Man0rwE4n0409FyAJHStYA5OIZERxmnUfLVwv0mc0V1wLad3V7jqRlMkgKBeAq07jUvglacNaa6g9lOpuw==",
+ "dev": true,
+ "requires": {
+ "envinfo": "^7.7.3"
+ }
+ },
+ "@webpack-cli/serve": {
+ "version": "1.6.0",
+ "resolved": "https://registry.npmjs.org/@webpack-cli/serve/-/serve-1.6.0.tgz",
+ "integrity": "sha512-ZkVeqEmRpBV2GHvjjUZqEai2PpUbuq8Bqd//vEYsp63J8WyexI8ppCqVS3Zs0QADf6aWuPdU+0XsPI647PVlQA==",
+ "dev": true
+ },
+ "@xtuc/ieee754": {
+ "version": "1.2.0",
+ "resolved": "https://registry.npmjs.org/@xtuc/ieee754/-/ieee754-1.2.0.tgz",
+ "integrity": "sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA==",
+ "dev": true
+ },
+ "@xtuc/long": {
+ "version": "4.2.2",
+ "resolved": "https://registry.npmjs.org/@xtuc/long/-/long-4.2.2.tgz",
+ "integrity": "sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ==",
+ "dev": true
+ },
+ "acorn": {
+ "version": "8.6.0",
+ "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.6.0.tgz",
+ "integrity": "sha512-U1riIR+lBSNi3IbxtaHOIKdH8sLFv3NYfNv8sg7ZsNhcfl4HF2++BfqqrNAxoCLQW1iiylOj76ecnaUxz+z9yw==",
+ "dev": true
+ },
+ "acorn-import-assertions": {
+ "version": "1.8.0",
+ "resolved": "https://registry.npmjs.org/acorn-import-assertions/-/acorn-import-assertions-1.8.0.tgz",
+ "integrity": "sha512-m7VZ3jwz4eK6A4Vtt8Ew1/mNbP24u0FhdyfA7fSvnJR6LMdfOYnmuIrrJAgrYfYJ10F/otaHTtrtrtmHdMNzEw==",
+ "dev": true
+ },
+ "ajv": {
+ "version": "6.12.6",
+ "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz",
+ "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==",
+ "dev": true,
+ "requires": {
+ "fast-deep-equal": "^3.1.1",
+ "fast-json-stable-stringify": "^2.0.0",
+ "json-schema-traverse": "^0.4.1",
+ "uri-js": "^4.2.2"
+ }
+ },
+ "ajv-keywords": {
+ "version": "3.5.2",
+ "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.5.2.tgz",
+ "integrity": "sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==",
+ "dev": true
+ },
+ "browserslist": {
+ "version": "4.18.1",
+ "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.18.1.tgz",
+ "integrity": "sha512-8ScCzdpPwR2wQh8IT82CA2VgDwjHyqMovPBZSNH54+tm4Jk2pCuv90gmAdH6J84OCRWi0b4gMe6O6XPXuJnjgQ==",
+ "dev": true,
+ "requires": {
+ "caniuse-lite": "^1.0.30001280",
+ "electron-to-chromium": "^1.3.896",
+ "escalade": "^3.1.1",
+ "node-releases": "^2.0.1",
+ "picocolors": "^1.0.0"
+ }
+ },
+ "buffer-from": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz",
+ "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==",
+ "dev": true
+ },
+ "caniuse-lite": {
+ "version": "1.0.30001284",
+ "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001284.tgz",
+ "integrity": "sha512-t28SKa7g6kiIQi6NHeOcKrOrGMzCRrXvlasPwWC26TH2QNdglgzQIRUuJ0cR3NeQPH+5jpuveeeSFDLm2zbkEw==",
+ "dev": true
+ },
+ "chrome-trace-event": {
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/chrome-trace-event/-/chrome-trace-event-1.0.3.tgz",
+ "integrity": "sha512-p3KULyQg4S7NIHixdwbGX+nFHkoBiA4YQmyWtjb8XngSKV124nJmRysgAeujbUVb15vh+RvFUfCPqU7rXk+hZg==",
+ "dev": true
+ },
+ "clone-deep": {
+ "version": "4.0.1",
+ "resolved": "https://registry.npmjs.org/clone-deep/-/clone-deep-4.0.1.tgz",
+ "integrity": "sha512-neHB9xuzh/wk0dIHweyAXv2aPGZIVk3pLMe+/RNzINf17fe0OG96QroktYAUm7SM1PBnzTabaLboqqxDyMU+SQ==",
+ "dev": true,
+ "requires": {
+ "is-plain-object": "^2.0.4",
+ "kind-of": "^6.0.2",
+ "shallow-clone": "^3.0.0"
+ }
+ },
+ "colorette": {
+ "version": "2.0.16",
+ "resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.16.tgz",
+ "integrity": "sha512-hUewv7oMjCp+wkBv5Rm0v87eJhq4woh5rSR+42YSQJKecCqgIqNkZ6lAlQms/BwHPJA5NKMRlpxPRv0n8HQW6g==",
+ "dev": true
+ },
+ "commander": {
+ "version": "2.20.3",
+ "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz",
+ "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==",
+ "dev": true
+ },
+ "cross-spawn": {
+ "version": "7.0.3",
+ "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz",
+ "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==",
+ "dev": true,
+ "requires": {
+ "path-key": "^3.1.0",
+ "shebang-command": "^2.0.0",
+ "which": "^2.0.1"
+ }
+ },
+ "d3-color": {
+ "version": "1.4.1",
+ "resolved": "https://registry.npmjs.org/d3-color/-/d3-color-1.4.1.tgz",
+ "integrity": "sha512-p2sTHSLCJI2QKunbGb7ocOh7DgTAn8IrLx21QRc/BSnodXM4sv6aLQlnfpvehFMLZEfBc6g9pH9SWQccFYfJ9Q=="
+ },
+ "d3-flame-graph": {
+ "version": "4.1.3",
+ "resolved": "https://registry.npmjs.org/d3-flame-graph/-/d3-flame-graph-4.1.3.tgz",
+ "integrity": "sha512-NijuhJZhaTMwobVgwGQ67x9PovqMMHXBbs0FMHEGJvsWZGuL4M7OsB03v8mHdyVyHhnQYGsYnb5w021e9+R+RQ==",
+ "requires": {
+ "d3-array": "^3.1.1",
+ "d3-dispatch": "^3.0.1",
+ "d3-ease": "^3.0.1",
+ "d3-format": "^3.0.1",
+ "d3-hierarchy": "^3.0.1",
+ "d3-scale": "^4.0.2",
+ "d3-selection": "^3.0.0",
+ "d3-transition": "^3.0.1"
+ },
+ "dependencies": {
+ "d3-array": {
+ "version": "3.1.1",
+ "resolved": "https://registry.npmjs.org/d3-array/-/d3-array-3.1.1.tgz",
+ "integrity": "sha512-33qQ+ZoZlli19IFiQx4QEpf2CBEayMRzhlisJHSCsSUbDXv6ZishqS1x7uFVClKG4Wr7rZVHvaAttoLow6GqdQ==",
+ "requires": {
+ "internmap": "1 - 2"
+ }
+ },
+ "d3-dispatch": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/d3-dispatch/-/d3-dispatch-3.0.1.tgz",
+ "integrity": "sha512-rzUyPU/S7rwUflMyLc1ETDeBj0NRuHKKAcvukozwhshr6g6c5d8zh4c2gQjY2bZ0dXeGLWc1PF174P2tVvKhfg=="
+ },
+ "d3-ease": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/d3-ease/-/d3-ease-3.0.1.tgz",
+ "integrity": "sha512-wR/XK3D3XcLIZwpbvQwQ5fK+8Ykds1ip7A2Txe0yxncXSdq1L9skcG7blcedkOX+ZcgxGAmLX1FrRGbADwzi0w=="
+ },
+ "d3-format": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/d3-format/-/d3-format-3.0.1.tgz",
+ "integrity": "sha512-hdL7+HBIohpgfolhBxr1KX47VMD6+vVD/oEFrxk5yhmzV2prk99EkFKYpXuhVkFpTgHdJ6/4bYcjdLPPXV4tIA=="
+ },
+ "d3-hierarchy": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/d3-hierarchy/-/d3-hierarchy-3.0.1.tgz",
+ "integrity": "sha512-RlLTaofEoOrMK1JoXYIGhKTkJFI/6rFrYPgxy6QlZo2BcVc4HGTqEU0rPpzuMq5T/5XcMtAzv1XiLA3zRTfygw=="
+ },
+ "d3-scale": {
+ "version": "4.0.2",
+ "resolved": "https://registry.npmjs.org/d3-scale/-/d3-scale-4.0.2.tgz",
+ "integrity": "sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ==",
+ "requires": {
+ "d3-array": "2.10.0 - 3",
+ "d3-format": "1 - 3",
+ "d3-interpolate": "1.2.0 - 3",
+ "d3-time": "2.1.1 - 3",
+ "d3-time-format": "2 - 4"
+ }
+ },
+ "d3-selection": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/d3-selection/-/d3-selection-3.0.0.tgz",
+ "integrity": "sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ=="
+ },
+ "d3-time": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/d3-time/-/d3-time-3.0.0.tgz",
+ "integrity": "sha512-zmV3lRnlaLI08y9IMRXSDshQb5Nj77smnfpnd2LrBa/2K281Jijactokeak14QacHs/kKq0AQ121nidNYlarbQ==",
+ "requires": {
+ "d3-array": "2 - 3"
+ }
+ },
+ "d3-transition": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/d3-transition/-/d3-transition-3.0.1.tgz",
+ "integrity": "sha512-ApKvfjsSR6tg06xrL434C0WydLr7JewBB3V+/39RMHsaXTOG0zmt/OAXeng5M5LBm0ojmxJrpomQVZ1aPvBL4w==",
+ "requires": {
+ "d3-color": "1 - 3",
+ "d3-dispatch": "1 - 3",
+ "d3-ease": "1 - 3",
+ "d3-interpolate": "1 - 3",
+ "d3-timer": "1 - 3"
+ }
+ }
+ }
+ },
+ "d3-interpolate": {
+ "version": "1.4.0",
+ "resolved": "https://registry.npmjs.org/d3-interpolate/-/d3-interpolate-1.4.0.tgz",
+ "integrity": "sha512-V9znK0zc3jOPV4VD2zZn0sDhZU3WAE2bmlxdIwwQPPzPjvyLkd8B3JUVdS1IDUFDkWZ72c9qnv1GK2ZagTZ8EA==",
+ "requires": {
+ "d3-color": "1"
+ }
+ },
+ "d3-selection": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/d3-selection/-/d3-selection-3.0.0.tgz",
+ "integrity": "sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ=="
+ },
+ "d3-time": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/d3-time/-/d3-time-1.1.0.tgz",
+ "integrity": "sha512-Xh0isrZ5rPYYdqhAVk8VLnMEidhz5aP7htAADH6MfzgmmicPkTo8LhkLxci61/lCB7n7UmE3bN0leRt+qvkLxA=="
+ },
+ "d3-time-format": {
+ "version": "2.3.0",
+ "resolved": "https://registry.npmjs.org/d3-time-format/-/d3-time-format-2.3.0.tgz",
+ "integrity": "sha512-guv6b2H37s2Uq/GefleCDtbe0XZAuy7Wa49VGkPVPMfLL9qObgBST3lEHJBMUp8S7NdLQAGIvr2KXk8Hc98iKQ==",
+ "requires": {
+ "d3-time": "1"
+ }
+ },
+ "d3-timer": {
+ "version": "1.0.10",
+ "resolved": "https://registry.npmjs.org/d3-timer/-/d3-timer-1.0.10.tgz",
+ "integrity": "sha512-B1JDm0XDaQC+uvo4DT79H0XmBskgS3l6Ve+1SBCfxgmtIb1AVrPIoqd+nPSv+loMX8szQ0sVUhGngL7D5QPiXw=="
+ },
+ "electron-to-chromium": {
+ "version": "1.4.11",
+ "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.11.tgz",
+ "integrity": "sha512-2OhsaYgsWGhWjx2et8kaUcdktPbBGjKM2X0BReUCKcSCPttEY+hz2zie820JLbttU8jwL92+JJysWwkut3wZgA==",
+ "dev": true
+ },
+ "enhanced-resolve": {
+ "version": "5.8.3",
+ "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.8.3.tgz",
+ "integrity": "sha512-EGAbGvH7j7Xt2nc0E7D99La1OiEs8LnyimkRgwExpUMScN6O+3x9tIWs7PLQZVNx4YD+00skHXPXi1yQHpAmZA==",
+ "dev": true,
+ "requires": {
+ "graceful-fs": "^4.2.4",
+ "tapable": "^2.2.0"
+ }
+ },
+ "envinfo": {
+ "version": "7.8.1",
+ "resolved": "https://registry.npmjs.org/envinfo/-/envinfo-7.8.1.tgz",
+ "integrity": "sha512-/o+BXHmB7ocbHEAs6F2EnG0ogybVVUdkRunTT2glZU9XAaGmhqskrvKwqXuDfNjEO0LZKWdejEEpnq8aM0tOaw==",
+ "dev": true
+ },
+ "es-module-lexer": {
+ "version": "0.9.3",
+ "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-0.9.3.tgz",
+ "integrity": "sha512-1HQ2M2sPtxwnvOvT1ZClHyQDiggdNjURWpY2we6aMKCQiUVxTmVs2UYPLIrD84sS+kMdUwfBSylbJPwNnBrnHQ==",
+ "dev": true
+ },
+ "escalade": {
+ "version": "3.1.1",
+ "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz",
+ "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==",
+ "dev": true
+ },
+ "eslint-scope": {
+ "version": "5.1.1",
+ "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz",
+ "integrity": "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==",
+ "dev": true,
+ "requires": {
+ "esrecurse": "^4.3.0",
+ "estraverse": "^4.1.1"
+ }
+ },
+ "esrecurse": {
+ "version": "4.3.0",
+ "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz",
+ "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==",
+ "dev": true,
+ "requires": {
+ "estraverse": "^5.2.0"
+ },
+ "dependencies": {
+ "estraverse": {
+ "version": "5.3.0",
+ "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz",
+ "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==",
+ "dev": true
+ }
+ }
+ },
+ "estraverse": {
+ "version": "4.3.0",
+ "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz",
+ "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==",
+ "dev": true
+ },
+ "events": {
+ "version": "3.3.0",
+ "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz",
+ "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==",
+ "dev": true
+ },
+ "execa": {
+ "version": "5.1.1",
+ "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz",
+ "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==",
+ "dev": true,
+ "requires": {
+ "cross-spawn": "^7.0.3",
+ "get-stream": "^6.0.0",
+ "human-signals": "^2.1.0",
+ "is-stream": "^2.0.0",
+ "merge-stream": "^2.0.0",
+ "npm-run-path": "^4.0.1",
+ "onetime": "^5.1.2",
+ "signal-exit": "^3.0.3",
+ "strip-final-newline": "^2.0.0"
+ }
+ },
+ "fast-deep-equal": {
+ "version": "3.1.3",
+ "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz",
+ "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==",
+ "dev": true
+ },
+ "fast-json-stable-stringify": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz",
+ "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==",
+ "dev": true
+ },
+ "fastest-levenshtein": {
+ "version": "1.0.12",
+ "resolved": "https://registry.npmjs.org/fastest-levenshtein/-/fastest-levenshtein-1.0.12.tgz",
+ "integrity": "sha512-On2N+BpYJ15xIC974QNVuYGMOlEVt4s0EOI3wwMqOmK1fdDY+FN/zltPV8vosq4ad4c/gJ1KHScUn/6AWIgiow==",
+ "dev": true
+ },
+ "find-up": {
+ "version": "4.1.0",
+ "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz",
+ "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==",
+ "dev": true,
+ "requires": {
+ "locate-path": "^5.0.0",
+ "path-exists": "^4.0.0"
+ }
+ },
+ "function-bind": {
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz",
+ "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==",
+ "dev": true
+ },
+ "get-stream": {
+ "version": "6.0.1",
+ "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz",
+ "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==",
+ "dev": true
+ },
+ "glob-to-regexp": {
+ "version": "0.4.1",
+ "resolved": "https://registry.npmjs.org/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz",
+ "integrity": "sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==",
+ "dev": true
+ },
+ "graceful-fs": {
+ "version": "4.2.8",
+ "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.8.tgz",
+ "integrity": "sha512-qkIilPUYcNhJpd33n0GBXTB1MMPp14TxEsEs0pTrsSVucApsYzW5V+Q8Qxhik6KU3evy+qkAAowTByymK0avdg==",
+ "dev": true
+ },
+ "has": {
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz",
+ "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==",
+ "dev": true,
+ "requires": {
+ "function-bind": "^1.1.1"
+ }
+ },
+ "has-flag": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz",
+ "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==",
+ "dev": true
+ },
+ "human-signals": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz",
+ "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==",
+ "dev": true
+ },
+ "import-local": {
+ "version": "3.0.3",
+ "resolved": "https://registry.npmjs.org/import-local/-/import-local-3.0.3.tgz",
+ "integrity": "sha512-bE9iaUY3CXH8Cwfan/abDKAxe1KGT9kyGsBPqf6DMK/z0a2OzAsrukeYNgIH6cH5Xr452jb1TUL8rSfCLjZ9uA==",
+ "dev": true,
+ "requires": {
+ "pkg-dir": "^4.2.0",
+ "resolve-cwd": "^3.0.0"
+ }
+ },
+ "internmap": {
+ "version": "2.0.3",
+ "resolved": "https://registry.npmjs.org/internmap/-/internmap-2.0.3.tgz",
+ "integrity": "sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg=="
+ },
+ "interpret": {
+ "version": "2.2.0",
+ "resolved": "https://registry.npmjs.org/interpret/-/interpret-2.2.0.tgz",
+ "integrity": "sha512-Ju0Bz/cEia55xDwUWEa8+olFpCiQoypjnQySseKtmjNrnps3P+xfpUmGr90T7yjlVJmOtybRvPXhKMbHr+fWnw==",
+ "dev": true
+ },
+ "is-core-module": {
+ "version": "2.8.0",
+ "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.8.0.tgz",
+ "integrity": "sha512-vd15qHsaqrRL7dtH6QNuy0ndJmRDrS9HAM1CAiSifNUFv4x1a0CCVsj18hJ1mShxIG6T2i1sO78MkP56r0nYRw==",
+ "dev": true,
+ "requires": {
+ "has": "^1.0.3"
+ }
+ },
+ "is-plain-object": {
+ "version": "2.0.4",
+ "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-2.0.4.tgz",
+ "integrity": "sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==",
+ "dev": true,
+ "requires": {
+ "isobject": "^3.0.1"
+ }
+ },
+ "is-stream": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz",
+ "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==",
+ "dev": true
+ },
+ "isexe": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz",
+ "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=",
+ "dev": true
+ },
+ "isobject": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz",
+ "integrity": "sha1-TkMekrEalzFjaqH5yNHMvP2reN8=",
+ "dev": true
+ },
+ "jest-worker": {
+ "version": "27.4.2",
+ "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-27.4.2.tgz",
+ "integrity": "sha512-0QMy/zPovLfUPyHuOuuU4E+kGACXXE84nRnq6lBVI9GJg5DCBiA97SATi+ZP8CpiJwEQy1oCPjRBf8AnLjN+Ag==",
+ "dev": true,
+ "requires": {
+ "@types/node": "*",
+ "merge-stream": "^2.0.0",
+ "supports-color": "^8.0.0"
+ }
+ },
+ "json-parse-better-errors": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz",
+ "integrity": "sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==",
+ "dev": true
+ },
+ "json-schema-traverse": {
+ "version": "0.4.1",
+ "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz",
+ "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==",
+ "dev": true
+ },
+ "kind-of": {
+ "version": "6.0.3",
+ "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz",
+ "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==",
+ "dev": true
+ },
+ "loader-runner": {
+ "version": "4.2.0",
+ "resolved": "https://registry.npmjs.org/loader-runner/-/loader-runner-4.2.0.tgz",
+ "integrity": "sha512-92+huvxMvYlMzMt0iIOukcwYBFpkYJdpl2xsZ7LrlayO7E8SOv+JJUEK17B/dJIHAOLMfh2dZZ/Y18WgmGtYNw==",
+ "dev": true
+ },
+ "locate-path": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz",
+ "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==",
+ "dev": true,
+ "requires": {
+ "p-locate": "^4.1.0"
+ }
+ },
+ "merge-stream": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz",
+ "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==",
+ "dev": true
+ },
+ "mime-db": {
+ "version": "1.51.0",
+ "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.51.0.tgz",
+ "integrity": "sha512-5y8A56jg7XVQx2mbv1lu49NR4dokRnhZYTtL+KGfaa27uq4pSTXkwQkFJl4pkRMyNFz/EtYDSkiiEHx3F7UN6g==",
+ "dev": true
+ },
+ "mime-types": {
+ "version": "2.1.34",
+ "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.34.tgz",
+ "integrity": "sha512-6cP692WwGIs9XXdOO4++N+7qjqv0rqxxVvJ3VHPh/Sc9mVZcQP+ZGhkKiTvWMQRr2tbHkJP/Yn7Y0npb3ZBs4A==",
+ "dev": true,
+ "requires": {
+ "mime-db": "1.51.0"
+ }
+ },
+ "mimic-fn": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz",
+ "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==",
+ "dev": true
+ },
+ "neo-async": {
+ "version": "2.6.2",
+ "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz",
+ "integrity": "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==",
+ "dev": true
+ },
+ "node-releases": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.1.tgz",
+ "integrity": "sha512-CqyzN6z7Q6aMeF/ktcMVTzhAHCEpf8SOarwpzpf8pNBY2k5/oM34UHldUwp8VKI7uxct2HxSRdJjBaZeESzcxA==",
+ "dev": true
+ },
+ "npm-run-path": {
+ "version": "4.0.1",
+ "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz",
+ "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==",
+ "dev": true,
+ "requires": {
+ "path-key": "^3.0.0"
+ }
+ },
+ "onetime": {
+ "version": "5.1.2",
+ "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz",
+ "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==",
+ "dev": true,
+ "requires": {
+ "mimic-fn": "^2.1.0"
+ }
+ },
+ "p-limit": {
+ "version": "2.3.0",
+ "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz",
+ "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==",
+ "dev": true,
+ "requires": {
+ "p-try": "^2.0.0"
+ }
+ },
+ "p-locate": {
+ "version": "4.1.0",
+ "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz",
+ "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==",
+ "dev": true,
+ "requires": {
+ "p-limit": "^2.2.0"
+ }
+ },
+ "p-try": {
+ "version": "2.2.0",
+ "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz",
+ "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==",
+ "dev": true
+ },
+ "path-exists": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz",
+ "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==",
+ "dev": true
+ },
+ "path-key": {
+ "version": "3.1.1",
+ "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz",
+ "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==",
+ "dev": true
+ },
+ "path-parse": {
+ "version": "1.0.7",
+ "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz",
+ "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==",
+ "dev": true
+ },
+ "picocolors": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz",
+ "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==",
+ "dev": true
+ },
+ "pkg-dir": {
+ "version": "4.2.0",
+ "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz",
+ "integrity": "sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==",
+ "dev": true,
+ "requires": {
+ "find-up": "^4.0.0"
+ }
+ },
+ "punycode": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz",
+ "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==",
+ "dev": true
+ },
+ "randombytes": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz",
+ "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==",
+ "dev": true,
+ "requires": {
+ "safe-buffer": "^5.1.0"
+ }
+ },
+ "rechoir": {
+ "version": "0.7.1",
+ "resolved": "https://registry.npmjs.org/rechoir/-/rechoir-0.7.1.tgz",
+ "integrity": "sha512-/njmZ8s1wVeR6pjTZ+0nCnv8SpZNRMT2D1RLOJQESlYFDBvwpTA4KWJpZ+sBJ4+vhjILRcK7JIFdGCdxEAAitg==",
+ "dev": true,
+ "requires": {
+ "resolve": "^1.9.0"
+ }
+ },
+ "resolve": {
+ "version": "1.20.0",
+ "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.20.0.tgz",
+ "integrity": "sha512-wENBPt4ySzg4ybFQW2TT1zMQucPK95HSh/nq2CFTZVOGut2+pQvSsgtda4d26YrYcr067wjbmzOG8byDPBX63A==",
+ "dev": true,
+ "requires": {
+ "is-core-module": "^2.2.0",
+ "path-parse": "^1.0.6"
+ }
+ },
+ "resolve-cwd": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/resolve-cwd/-/resolve-cwd-3.0.0.tgz",
+ "integrity": "sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg==",
+ "dev": true,
+ "requires": {
+ "resolve-from": "^5.0.0"
+ }
+ },
+ "resolve-from": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz",
+ "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==",
+ "dev": true
+ },
+ "safe-buffer": {
+ "version": "5.2.1",
+ "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
+ "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==",
+ "dev": true
+ },
+ "schema-utils": {
+ "version": "3.1.1",
+ "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.1.1.tgz",
+ "integrity": "sha512-Y5PQxS4ITlC+EahLuXaY86TXfR7Dc5lw294alXOq86JAHCihAIZfqv8nNCWvaEJvaC51uN9hbLGeV0cFBdH+Fw==",
+ "dev": true,
+ "requires": {
+ "@types/json-schema": "^7.0.8",
+ "ajv": "^6.12.5",
+ "ajv-keywords": "^3.5.2"
+ }
+ },
+ "serialize-javascript": {
+ "version": "6.0.0",
+ "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.0.tgz",
+ "integrity": "sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag==",
+ "dev": true,
+ "requires": {
+ "randombytes": "^2.1.0"
+ }
+ },
+ "shallow-clone": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/shallow-clone/-/shallow-clone-3.0.1.tgz",
+ "integrity": "sha512-/6KqX+GVUdqPuPPd2LxDDxzX6CAbjJehAAOKlNpqqUpAqPM6HeL8f+o3a+JsyGjn2lv0WY8UsTgUJjU9Ok55NA==",
+ "dev": true,
+ "requires": {
+ "kind-of": "^6.0.2"
+ }
+ },
+ "shebang-command": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz",
+ "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==",
+ "dev": true,
+ "requires": {
+ "shebang-regex": "^3.0.0"
+ }
+ },
+ "shebang-regex": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz",
+ "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==",
+ "dev": true
+ },
+ "signal-exit": {
+ "version": "3.0.6",
+ "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.6.tgz",
+ "integrity": "sha512-sDl4qMFpijcGw22U5w63KmD3cZJfBuFlVNbVMKje2keoKML7X2UzWbc4XrmEbDwg0NXJc3yv4/ox7b+JWb57kQ==",
+ "dev": true
+ },
+ "source-map": {
+ "version": "0.6.1",
+ "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
+ "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==",
+ "dev": true
+ },
+ "source-map-support": {
+ "version": "0.5.21",
+ "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz",
+ "integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==",
+ "dev": true,
+ "requires": {
+ "buffer-from": "^1.0.0",
+ "source-map": "^0.6.0"
+ }
+ },
+ "strip-final-newline": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz",
+ "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==",
+ "dev": true
+ },
+ "supports-color": {
+ "version": "8.1.1",
+ "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz",
+ "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==",
+ "dev": true,
+ "requires": {
+ "has-flag": "^4.0.0"
+ }
+ },
+ "tapable": {
+ "version": "2.2.1",
+ "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.2.1.tgz",
+ "integrity": "sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ==",
+ "dev": true
+ },
+ "terser": {
+ "version": "5.10.0",
+ "resolved": "https://registry.npmjs.org/terser/-/terser-5.10.0.tgz",
+ "integrity": "sha512-AMmF99DMfEDiRJfxfY5jj5wNH/bYO09cniSqhfoyxc8sFoYIgkJy86G04UoZU5VjlpnplVu0K6Tx6E9b5+DlHA==",
+ "dev": true,
+ "requires": {
+ "commander": "^2.20.0",
+ "source-map": "~0.7.2",
+ "source-map-support": "~0.5.20"
+ },
+ "dependencies": {
+ "source-map": {
+ "version": "0.7.3",
+ "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.3.tgz",
+ "integrity": "sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ==",
+ "dev": true
+ }
+ }
+ },
+ "terser-webpack-plugin": {
+ "version": "5.2.5",
+ "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.2.5.tgz",
+ "integrity": "sha512-3luOVHku5l0QBeYS8r4CdHYWEGMmIj3H1U64jgkdZzECcSOJAyJ9TjuqcQZvw1Y+4AOBN9SeYJPJmFn2cM4/2g==",
+ "dev": true,
+ "requires": {
+ "jest-worker": "^27.0.6",
+ "schema-utils": "^3.1.1",
+ "serialize-javascript": "^6.0.0",
+ "source-map": "^0.6.1",
+ "terser": "^5.7.2"
+ }
+ },
+ "uri-js": {
+ "version": "4.4.1",
+ "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz",
+ "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==",
+ "dev": true,
+ "requires": {
+ "punycode": "^2.1.0"
+ }
+ },
+ "watchpack": {
+ "version": "2.3.0",
+ "resolved": "https://registry.npmjs.org/watchpack/-/watchpack-2.3.0.tgz",
+ "integrity": "sha512-MnN0Q1OsvB/GGHETrFeZPQaOelWh/7O+EiFlj8sM9GPjtQkis7k01aAxrg/18kTfoIVcLL+haEVFlXDaSRwKRw==",
+ "dev": true,
+ "requires": {
+ "glob-to-regexp": "^0.4.1",
+ "graceful-fs": "^4.1.2"
+ }
+ },
+ "webpack": {
+ "version": "5.64.4",
+ "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.64.4.tgz",
+ "integrity": "sha512-LWhqfKjCLoYJLKJY8wk2C3h77i8VyHowG3qYNZiIqD6D0ZS40439S/KVuc/PY48jp2yQmy0mhMknq8cys4jFMw==",
+ "dev": true,
+ "requires": {
+ "@types/eslint-scope": "^3.7.0",
+ "@types/estree": "^0.0.50",
+ "@webassemblyjs/ast": "1.11.1",
+ "@webassemblyjs/wasm-edit": "1.11.1",
+ "@webassemblyjs/wasm-parser": "1.11.1",
+ "acorn": "^8.4.1",
+ "acorn-import-assertions": "^1.7.6",
+ "browserslist": "^4.14.5",
+ "chrome-trace-event": "^1.0.2",
+ "enhanced-resolve": "^5.8.3",
+ "es-module-lexer": "^0.9.0",
+ "eslint-scope": "5.1.1",
+ "events": "^3.2.0",
+ "glob-to-regexp": "^0.4.1",
+ "graceful-fs": "^4.2.4",
+ "json-parse-better-errors": "^1.0.2",
+ "loader-runner": "^4.2.0",
+ "mime-types": "^2.1.27",
+ "neo-async": "^2.6.2",
+ "schema-utils": "^3.1.0",
+ "tapable": "^2.1.1",
+ "terser-webpack-plugin": "^5.1.3",
+ "watchpack": "^2.3.0",
+ "webpack-sources": "^3.2.2"
+ }
+ },
+ "webpack-cli": {
+ "version": "4.9.1",
+ "resolved": "https://registry.npmjs.org/webpack-cli/-/webpack-cli-4.9.1.tgz",
+ "integrity": "sha512-JYRFVuyFpzDxMDB+v/nanUdQYcZtqFPGzmlW4s+UkPMFhSpfRNmf1z4AwYcHJVdvEFAM7FFCQdNTpsBYhDLusQ==",
+ "dev": true,
+ "requires": {
+ "@discoveryjs/json-ext": "^0.5.0",
+ "@webpack-cli/configtest": "^1.1.0",
+ "@webpack-cli/info": "^1.4.0",
+ "@webpack-cli/serve": "^1.6.0",
+ "colorette": "^2.0.14",
+ "commander": "^7.0.0",
+ "execa": "^5.0.0",
+ "fastest-levenshtein": "^1.0.12",
+ "import-local": "^3.0.2",
+ "interpret": "^2.2.0",
+ "rechoir": "^0.7.0",
+ "webpack-merge": "^5.7.3"
+ },
+ "dependencies": {
+ "commander": {
+ "version": "7.2.0",
+ "resolved": "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz",
+ "integrity": "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==",
+ "dev": true
+ }
+ }
+ },
+ "webpack-merge": {
+ "version": "5.8.0",
+ "resolved": "https://registry.npmjs.org/webpack-merge/-/webpack-merge-5.8.0.tgz",
+ "integrity": "sha512-/SaI7xY0831XwP6kzuwhKWVKDP9t1QY1h65lAFLbZqMPIuYcD9QAW4u9STIbU9kaJbPBB/geU/gLr1wDjOhQ+Q==",
+ "dev": true,
+ "requires": {
+ "clone-deep": "^4.0.1",
+ "wildcard": "^2.0.0"
+ }
+ },
+ "webpack-sources": {
+ "version": "3.2.2",
+ "resolved": "https://registry.npmjs.org/webpack-sources/-/webpack-sources-3.2.2.tgz",
+ "integrity": "sha512-cp5qdmHnu5T8wRg2G3vZZHoJPN14aqQ89SyQ11NpGH5zEMDCclt49rzo+MaRazk7/UeILhAI+/sEtcM+7Fr0nw==",
+ "dev": true
+ },
+ "which": {
+ "version": "2.0.2",
+ "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
+ "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==",
+ "dev": true,
+ "requires": {
+ "isexe": "^2.0.0"
+ }
+ },
+ "wildcard": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/wildcard/-/wildcard-2.0.0.tgz",
+ "integrity": "sha512-JcKqAHLPxcdb9KM49dufGXn2x3ssnfjbcaQdLlfZsL9rH9wgDQjUtDxbo8NE0F6SFvydeu1VhZe7hZuHsB2/pw==",
+ "dev": true
+ }
+ }
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/third_party/d3flamegraph/package.json b/src/cmd/vendor/github.com/google/pprof/third_party/d3flamegraph/package.json
new file mode 100644
index 0000000..1df57f0
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/third_party/d3flamegraph/package.json
@@ -0,0 +1,17 @@
+{
+ "name": "d3-pprof",
+ "version": "2.0.0",
+ "description": "A d3.js bundle for pprof.",
+ "scripts": {
+ "prepare": "webpack --mode production"
+ },
+ "license": "Apache-2.0",
+ "dependencies": {
+ "d3-flame-graph": "^4.1.3",
+ "d3-selection": "^3.0.0"
+ },
+ "devDependencies": {
+ "webpack": "^5.64.4",
+ "webpack-cli": "^4.9.1"
+ }
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/third_party/d3flamegraph/update.sh b/src/cmd/vendor/github.com/google/pprof/third_party/d3flamegraph/update.sh
new file mode 100755
index 0000000..7076c66
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/third_party/d3flamegraph/update.sh
@@ -0,0 +1,62 @@
+# Copyright 2021 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+#!/usr/bin/env bash
+
+set -eu
+set -o pipefail
+
+D3FLAMEGRAPH_CSS="d3-flamegraph.css"
+
+cd $(dirname $0)
+
+generate_d3_flame_graph_go() {
+ npm install
+ # https://stackoverflow.com/a/21199041/171898
+ local d3_js=$(cat d3.js | sed 's/`/`+"`"+`/g')
+ local d3_css=$(cat "node_modules/d3-flame-graph/dist/${D3FLAMEGRAPH_CSS}")
+
+ cat <<-EOF > d3_flame_graph.go
+// D3.js is a JavaScript library for manipulating documents based on data.
+// https://github.com/d3/d3
+// See D3_LICENSE file for license details
+
+// d3-flame-graph is a D3.js plugin that produces flame graphs from hierarchical data.
+// https://github.com/spiermar/d3-flame-graph
+// See D3_FLAME_GRAPH_LICENSE file for license details
+
+package d3flamegraph
+
+// JSSource returns the d3 and d3-flame-graph JavaScript bundle
+const JSSource = \`
+
+$d3_js
+\`
+
+// CSSSource returns the $D3FLAMEGRAPH_CSS file
+const CSSSource = \`
+$d3_css
+\`
+
+EOF
+ gofmt -w d3_flame_graph.go
+}
+
+get_licenses() {
+ cp node_modules/d3-selection/LICENSE D3_LICENSE
+ cp node_modules/d3-flame-graph/LICENSE D3_FLAME_GRAPH_LICENSE
+}
+
+get_licenses
+generate_d3_flame_graph_go
diff --git a/src/cmd/vendor/github.com/google/pprof/third_party/d3flamegraph/webpack.config.js b/src/cmd/vendor/github.com/google/pprof/third_party/d3flamegraph/webpack.config.js
new file mode 100644
index 0000000..71239d9
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/third_party/d3flamegraph/webpack.config.js
@@ -0,0 +1,13 @@
+// Minimal webpack config to package a minified JS bundle (including
+// dependencies) for execution in a <script> tag in the browser.
+module.exports = {
+ entry: './index.js',
+ output: {
+ path: __dirname, // Directory containing this webpack.config.js file.
+ filename: 'd3.js',
+ // Arbitrary; many module formats could be used, just keeping Universal
+ // Module Definition as it's the same as what we used in a previous
+ // version.
+ libraryTarget: 'umd',
+ },
+};
diff --git a/src/cmd/vendor/github.com/google/pprof/third_party/svgpan/LICENSE b/src/cmd/vendor/github.com/google/pprof/third_party/svgpan/LICENSE
new file mode 100644
index 0000000..35bc174
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/third_party/svgpan/LICENSE
@@ -0,0 +1,27 @@
+Copyright 2009-2017 Andrea Leofreddi <a.leofreddi@vleo.net>. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification, are
+permitted provided that the following conditions are met:
+
+ 1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ 2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+ 3. Neither the name of the copyright holder nor the names of its
+ contributors may be used to endorse or promote products derived from
+ this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY COPYRIGHT HOLDERS AND CONTRIBUTORS ``AS IS'' AND ANY EXPRESS
+OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
+AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS OR
+CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+The views and conclusions contained in the software and documentation are those of the
+authors and should not be interpreted as representing official policies, either expressed
+or implied, of Andrea Leofreddi.
diff --git a/src/cmd/vendor/github.com/google/pprof/third_party/svgpan/svgpan.go b/src/cmd/vendor/github.com/google/pprof/third_party/svgpan/svgpan.go
new file mode 100644
index 0000000..6ca08ad
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/third_party/svgpan/svgpan.go
@@ -0,0 +1,297 @@
+// SVG pan and zoom library.
+// See copyright notice in string constant below.
+
+package svgpan
+
+// https://github.com/aleofreddi/svgpan
+
+// JSSource returns the svgpan.js file
+const JSSource = `
+/**
+ * SVGPan library 1.2.2
+ * ======================
+ *
+ * Given an unique existing element with id "viewport" (or when missing, the
+ * first g-element), including the library into any SVG adds the following
+ * capabilities:
+ *
+ * - Mouse panning
+ * - Mouse zooming (using the wheel)
+ * - Object dragging
+ *
+ * You can configure the behaviour of the pan/zoom/drag with the variables
+ * listed in the CONFIGURATION section of this file.
+ *
+ * Known issues:
+ *
+ * - Zooming (while panning) on Safari has still some issues
+ *
+ * Releases:
+ *
+ * 1.2.2, Tue Aug 30 17:21:56 CEST 2011, Andrea Leofreddi
+ * - Fixed viewBox on root tag (#7)
+ * - Improved zoom speed (#2)
+ *
+ * 1.2.1, Mon Jul 4 00:33:18 CEST 2011, Andrea Leofreddi
+ * - Fixed a regression with mouse wheel (now working on Firefox 5)
+ * - Working with viewBox attribute (#4)
+ * - Added "use strict;" and fixed resulting warnings (#5)
+ * - Added configuration variables, dragging is disabled by default (#3)
+ *
+ * 1.2, Sat Mar 20 08:42:50 GMT 2010, Zeng Xiaohui
+ * Fixed a bug with browser mouse handler interaction
+ *
+ * 1.1, Wed Feb 3 17:39:33 GMT 2010, Zeng Xiaohui
+ * Updated the zoom code to support the mouse wheel on Safari/Chrome
+ *
+ * 1.0, Andrea Leofreddi
+ * First release
+ *
+ * This code is licensed under the following BSD license:
+ *
+ * Copyright 2009-2017 Andrea Leofreddi <a.leofreddi@vleo.net>. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without modification, are
+ * permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holder nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY COPYRIGHT HOLDERS AND CONTRIBUTORS ''AS IS'' AND ANY EXPRESS
+ * OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
+ * AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ * The views and conclusions contained in the software and documentation are those of the
+ * authors and should not be interpreted as representing official policies, either expressed
+ * or implied, of Andrea Leofreddi.
+ */
+
+"use strict";
+
+/// CONFIGURATION
+/// ====>
+
+var enablePan = 1; // 1 or 0: enable or disable panning (default enabled)
+var enableZoom = 1; // 1 or 0: enable or disable zooming (default enabled)
+var enableDrag = 0; // 1 or 0: enable or disable dragging (default disabled)
+var zoomScale = 0.2; // Zoom sensitivity
+
+/// <====
+/// END OF CONFIGURATION
+
+var root = document.documentElement;
+
+var state = 'none', svgRoot = null, stateTarget, stateOrigin, stateTf;
+
+setupHandlers(root);
+
+/**
+ * Register handlers
+ */
+function setupHandlers(root){
+ setAttributes(root, {
+ "onmouseup" : "handleMouseUp(evt)",
+ "onmousedown" : "handleMouseDown(evt)",
+ "onmousemove" : "handleMouseMove(evt)",
+ //"onmouseout" : "handleMouseUp(evt)", // Decomment this to stop the pan functionality when dragging out of the SVG element
+ });
+
+ if(navigator.userAgent.toLowerCase().indexOf('webkit') >= 0)
+ window.addEventListener('mousewheel', handleMouseWheel, false); // Chrome/Safari
+ else
+ window.addEventListener('DOMMouseScroll', handleMouseWheel, false); // Others
+}
+
+/**
+ * Retrieves the root element for SVG manipulation. The element is then cached into the svgRoot global variable.
+ */
+function getRoot(root) {
+ if(svgRoot == null) {
+ var r = root.getElementById("viewport") ? root.getElementById("viewport") : root.documentElement, t = r;
+
+ while(t != root) {
+ if(t.getAttribute("viewBox")) {
+ setCTM(r, t.getCTM());
+
+ t.removeAttribute("viewBox");
+ }
+
+ t = t.parentNode;
+ }
+
+ svgRoot = r;
+ }
+
+ return svgRoot;
+}
+
+/**
+ * Instance an SVGPoint object with given event coordinates.
+ */
+function getEventPoint(evt) {
+ var p = root.createSVGPoint();
+
+ p.x = evt.clientX;
+ p.y = evt.clientY;
+
+ return p;
+}
+
+/**
+ * Sets the current transform matrix of an element.
+ */
+function setCTM(element, matrix) {
+ var s = "matrix(" + matrix.a + "," + matrix.b + "," + matrix.c + "," + matrix.d + "," + matrix.e + "," + matrix.f + ")";
+
+ element.setAttribute("transform", s);
+}
+
+/**
+ * Dumps a matrix to a string (useful for debug).
+ */
+function dumpMatrix(matrix) {
+ var s = "[ " + matrix.a + ", " + matrix.c + ", " + matrix.e + "\n " + matrix.b + ", " + matrix.d + ", " + matrix.f + "\n 0, 0, 1 ]";
+
+ return s;
+}
+
+/**
+ * Sets attributes of an element.
+ */
+function setAttributes(element, attributes){
+ for (var i in attributes)
+ element.setAttributeNS(null, i, attributes[i]);
+}
+
+/**
+ * Handle mouse wheel event.
+ */
+function handleMouseWheel(evt) {
+ if(!enableZoom)
+ return;
+
+ if(evt.preventDefault)
+ evt.preventDefault();
+
+ evt.returnValue = false;
+
+ var svgDoc = evt.target.ownerDocument;
+
+ var delta;
+
+ if(evt.wheelDelta)
+ delta = evt.wheelDelta / 360; // Chrome/Safari
+ else
+ delta = evt.detail / -9; // Mozilla
+
+ var z = Math.pow(1 + zoomScale, delta);
+
+ var g = getRoot(svgDoc);
+
+ var p = getEventPoint(evt);
+
+ p = p.matrixTransform(g.getCTM().inverse());
+
+ // Compute new scale matrix in current mouse position
+ var k = root.createSVGMatrix().translate(p.x, p.y).scale(z).translate(-p.x, -p.y);
+
+ setCTM(g, g.getCTM().multiply(k));
+
+ if(typeof(stateTf) == "undefined")
+ stateTf = g.getCTM().inverse();
+
+ stateTf = stateTf.multiply(k.inverse());
+}
+
+/**
+ * Handle mouse move event.
+ */
+function handleMouseMove(evt) {
+ if(evt.preventDefault)
+ evt.preventDefault();
+
+ evt.returnValue = false;
+
+ var svgDoc = evt.target.ownerDocument;
+
+ var g = getRoot(svgDoc);
+
+ if(state == 'pan' && enablePan) {
+ // Pan mode
+ var p = getEventPoint(evt).matrixTransform(stateTf);
+
+ setCTM(g, stateTf.inverse().translate(p.x - stateOrigin.x, p.y - stateOrigin.y));
+ } else if(state == 'drag' && enableDrag) {
+ // Drag mode
+ var p = getEventPoint(evt).matrixTransform(g.getCTM().inverse());
+
+ setCTM(stateTarget, root.createSVGMatrix().translate(p.x - stateOrigin.x, p.y - stateOrigin.y).multiply(g.getCTM().inverse()).multiply(stateTarget.getCTM()));
+
+ stateOrigin = p;
+ }
+}
+
+/**
+ * Handle click event.
+ */
+function handleMouseDown(evt) {
+ if(evt.preventDefault)
+ evt.preventDefault();
+
+ evt.returnValue = false;
+
+ var svgDoc = evt.target.ownerDocument;
+
+ var g = getRoot(svgDoc);
+
+ if(
+ evt.target.tagName == "svg"
+ || !enableDrag // Pan anyway when drag is disabled and the user clicked on an element
+ ) {
+ // Pan mode
+ state = 'pan';
+
+ stateTf = g.getCTM().inverse();
+
+ stateOrigin = getEventPoint(evt).matrixTransform(stateTf);
+ } else {
+ // Drag mode
+ state = 'drag';
+
+ stateTarget = evt.target;
+
+ stateTf = g.getCTM().inverse();
+
+ stateOrigin = getEventPoint(evt).matrixTransform(stateTf);
+ }
+}
+
+/**
+ * Handle mouse button release event.
+ */
+function handleMouseUp(evt) {
+ if(evt.preventDefault)
+ evt.preventDefault();
+
+ evt.returnValue = false;
+
+ var svgDoc = evt.target.ownerDocument;
+
+ if(state == 'pan' || state == 'drag') {
+ // Quit pan mode
+ state = '';
+ }
+}
+`
diff --git a/src/cmd/vendor/github.com/ianlancetaylor/demangle/.gitignore b/src/cmd/vendor/github.com/ianlancetaylor/demangle/.gitignore
new file mode 100644
index 0000000..4a8b38f
--- /dev/null
+++ b/src/cmd/vendor/github.com/ianlancetaylor/demangle/.gitignore
@@ -0,0 +1,13 @@
+*.o
+*.a
+*.so
+._*
+.nfs.*
+a.out
+*~
+*.orig
+*.rej
+*.exe
+.*.swp
+core
+demangle.test
diff --git a/src/cmd/vendor/github.com/ianlancetaylor/demangle/LICENSE b/src/cmd/vendor/github.com/ianlancetaylor/demangle/LICENSE
new file mode 100644
index 0000000..d29b372
--- /dev/null
+++ b/src/cmd/vendor/github.com/ianlancetaylor/demangle/LICENSE
@@ -0,0 +1,27 @@
+Copyright (c) 2015 The Go Authors. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+ * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/src/cmd/vendor/github.com/ianlancetaylor/demangle/README.md b/src/cmd/vendor/github.com/ianlancetaylor/demangle/README.md
new file mode 100644
index 0000000..2c01cae
--- /dev/null
+++ b/src/cmd/vendor/github.com/ianlancetaylor/demangle/README.md
@@ -0,0 +1,3 @@
+# github.com/ianlancetaylor/demangle
+
+A Go package that can be used to demangle C++ and Rust symbol names.
diff --git a/src/cmd/vendor/github.com/ianlancetaylor/demangle/SECURITY.md b/src/cmd/vendor/github.com/ianlancetaylor/demangle/SECURITY.md
new file mode 100644
index 0000000..f4edf9e
--- /dev/null
+++ b/src/cmd/vendor/github.com/ianlancetaylor/demangle/SECURITY.md
@@ -0,0 +1,13 @@
+# Security Policy
+
+## Supported Versions
+
+Security updates are applied only to the latest release.
+
+## Reporting a Vulnerability
+
+If you have discovered a security vulnerability in this project, please report it privately. **Do not disclose it as a public issue.** This gives us time to work with you to fix the issue before public exposure, reducing the chance that the exploit will be used before a patch is released.
+
+Please disclose it at [security advisory](https://github.com/ianlancetaylor/demangle/security/advisories/new).
+
+This project is maintained by volunteers on a reasonable-effort basis. As such, please give us at least 90 days to work on a fix before public exposure.
diff --git a/src/cmd/vendor/github.com/ianlancetaylor/demangle/ast.go b/src/cmd/vendor/github.com/ianlancetaylor/demangle/ast.go
new file mode 100644
index 0000000..cdc98c3
--- /dev/null
+++ b/src/cmd/vendor/github.com/ianlancetaylor/demangle/ast.go
@@ -0,0 +1,4142 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package demangle
+
+import (
+ "fmt"
+ "strings"
+)
+
+// AST is an abstract syntax tree representing a C++ declaration.
+// This is sufficient for the demangler but is by no means a general C++ AST.
+// This abstract syntax tree is only used for C++ symbols, not Rust symbols.
+type AST interface {
+ // Internal method to convert to demangled string.
+ print(*printState)
+
+ // Traverse each element of an AST. If the function returns
+ // false, traversal of children of that element is skipped.
+ Traverse(func(AST) bool)
+
+ // Copy an AST with possible transformations.
+ // If the skip function returns true, no copy is required.
+ // If the copy function returns nil, no copy is required.
+ // The Copy method will do the right thing if copy returns nil
+ // for some components of an AST but not others, so a good
+ // copy function will only return non-nil for AST values that
+ // need to change.
+ // Copy itself returns either a copy or nil.
+ Copy(copy func(AST) AST, skip func(AST) bool) AST
+
+ // Implement the fmt.GoStringer interface.
+ GoString() string
+ goString(indent int, field string) string
+}
+
// ASTToString returns the demangled name of the AST.
func ASTToString(a AST, options ...Option) string {
	// Decode the options into the flags the printer understands.
	tparams := true
	enclosingParams := true
	llvmStyle := false
	max := 0
	for _, o := range options {
		switch {
		case o == NoTemplateParams:
			tparams = false
		case o == NoEnclosingParams:
			enclosingParams = false
		case o == LLVMStyle:
			llvmStyle = true
		case isMaxLength(o):
			max = maxLength(o)
		}
	}

	ps := printState{
		tparams:         tparams,
		enclosingParams: enclosingParams,
		llvmStyle:       llvmStyle,
		max:             max,
	}
	a.print(&ps)
	s := ps.buf.String()
	// The printer may overshoot the limit before it notices (see
	// printState.print), so enforce it here. NOTE(review): this is
	// a byte-level cut and can split a multi-byte UTF-8 sequence.
	if max > 0 && len(s) > max {
		s = s[:max]
	}
	return s
}
+
// The printState type holds information needed to print an AST.
type printState struct {
	tparams         bool // whether to print template parameters
	enclosingParams bool // whether to print enclosing parameters
	llvmStyle       bool // whether to mimic LLVM's demangler output
	max             int  // maximum output length; 0 means no limit

	buf  strings.Builder
	last byte // Last byte written to buffer.

	// The inner field is a list of items to print for a type
	// name. This is used by types to implement the inside-out
	// C++ declaration syntax.
	inner []AST

	// The printing field is a list of items we are currently
	// printing. This avoids endless recursion if a substitution
	// reference creates a cycle in the graph.
	printing []AST
}

// writeByte adds a byte to the string being printed.
// It records the byte in last so printers can make spacing
// decisions based on what was most recently emitted.
func (ps *printState) writeByte(b byte) {
	ps.last = b
	ps.buf.WriteByte(b)
}

// writeString adds a string to the string being printed,
// recording the final byte in last (empty strings leave last
// unchanged).
func (ps *printState) writeString(s string) {
	if len(s) > 0 {
		ps.last = s[len(s)-1]
	}
	ps.buf.WriteString(s)
}
+
// Print an AST.
func (ps *printState) print(a AST) {
	// Stop printing once the output limit has been exceeded;
	// the final truncation happens in ASTToString.
	if ps.max > 0 && ps.buf.Len() > ps.max {
		return
	}

	// Cycle detection: count how many times a already appears on
	// the stack of nodes being printed.
	c := 0
	for _, v := range ps.printing {
		if v == a {
			// We permit the type to appear once, and
			// return without printing anything if we see
			// it twice. This is for a case like
			// _Z6outer2IsEPFilES1_, where the
			// substitution is printed differently the
			// second time because the set of inner types
			// is different.
			c++
			if c > 1 {
				return
			}
		}
	}
	ps.printing = append(ps.printing, a)

	a.print(ps)

	ps.printing = ps.printing[:len(ps.printing)-1]
}
+
+// Name is an unqualified name.
+type Name struct {
+ Name string
+}
+
+func (n *Name) print(ps *printState) {
+ ps.writeString(n.Name)
+}
+
+func (n *Name) Traverse(fn func(AST) bool) {
+ fn(n)
+}
+
+func (n *Name) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(n) {
+ return nil
+ }
+ return fn(n)
+}
+
+func (n *Name) GoString() string {
+ return n.goString(0, "Name: ")
+}
+
+func (n *Name) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%s%s", indent, "", field, n.Name)
+}
+
+// Typed is a typed name.
+type Typed struct {
+ Name AST
+ Type AST
+}
+
+func (t *Typed) print(ps *printState) {
+ // We are printing a typed name, so ignore the current set of
+ // inner names to print. Pass down our name as the one to use.
+ holdInner := ps.inner
+ defer func() { ps.inner = holdInner }()
+
+ ps.inner = []AST{t}
+ ps.print(t.Type)
+ if len(ps.inner) > 0 {
+ // The type did not print the name; print it now in
+ // the default location.
+ ps.writeByte(' ')
+ ps.print(t.Name)
+ }
+}
+
+func (t *Typed) printInner(ps *printState) {
+ ps.print(t.Name)
+}
+
+func (t *Typed) Traverse(fn func(AST) bool) {
+ if fn(t) {
+ t.Name.Traverse(fn)
+ t.Type.Traverse(fn)
+ }
+}
+
+func (t *Typed) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(t) {
+ return nil
+ }
+ name := t.Name.Copy(fn, skip)
+ typ := t.Type.Copy(fn, skip)
+ if name == nil && typ == nil {
+ return fn(t)
+ }
+ if name == nil {
+ name = t.Name
+ }
+ if typ == nil {
+ typ = t.Type
+ }
+ t = &Typed{Name: name, Type: typ}
+ if r := fn(t); r != nil {
+ return r
+ }
+ return t
+}
+
+func (t *Typed) GoString() string {
+ return t.goString(0, "")
+}
+
+func (t *Typed) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sTyped:\n%s\n%s", indent, "", field,
+ t.Name.goString(indent+2, "Name: "),
+ t.Type.goString(indent+2, "Type: "))
+}
+
+// Qualified is a name in a scope.
+type Qualified struct {
+ Scope AST
+ Name AST
+
+ // The LocalName field is true if this is parsed as a
+ // <local-name>. We shouldn't really need this, but in some
+ // cases (for the unary sizeof operator) the standard
+ // demangler prints a local name slightly differently. We
+ // keep track of this for compatibility.
+ LocalName bool // A full local name encoding
+}
+
+func (q *Qualified) print(ps *printState) {
+ ps.print(q.Scope)
+ ps.writeString("::")
+ ps.print(q.Name)
+}
+
+func (q *Qualified) Traverse(fn func(AST) bool) {
+ if fn(q) {
+ q.Scope.Traverse(fn)
+ q.Name.Traverse(fn)
+ }
+}
+
+func (q *Qualified) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(q) {
+ return nil
+ }
+ scope := q.Scope.Copy(fn, skip)
+ name := q.Name.Copy(fn, skip)
+ if scope == nil && name == nil {
+ return fn(q)
+ }
+ if scope == nil {
+ scope = q.Scope
+ }
+ if name == nil {
+ name = q.Name
+ }
+ q = &Qualified{Scope: scope, Name: name, LocalName: q.LocalName}
+ if r := fn(q); r != nil {
+ return r
+ }
+ return q
+}
+
+func (q *Qualified) GoString() string {
+ return q.goString(0, "")
+}
+
+func (q *Qualified) goString(indent int, field string) string {
+ s := ""
+ if q.LocalName {
+ s = " LocalName: true"
+ }
+ return fmt.Sprintf("%*s%sQualified:%s\n%s\n%s", indent, "", field,
+ s, q.Scope.goString(indent+2, "Scope: "),
+ q.Name.goString(indent+2, "Name: "))
+}
+
+// Template is a template with arguments.
+type Template struct {
+ Name AST
+ Args []AST
+}
+
+func (t *Template) print(ps *printState) {
+ // Inner types apply to the template as a whole, they don't
+ // cross over into the template.
+ holdInner := ps.inner
+ defer func() { ps.inner = holdInner }()
+
+ ps.inner = nil
+ ps.print(t.Name)
+
+ if !ps.tparams {
+ // Do not print template parameters.
+ return
+ }
+ // We need an extra space after operator<.
+ if ps.last == '<' {
+ ps.writeByte(' ')
+ }
+
+ ps.writeByte('<')
+ first := true
+ for _, a := range t.Args {
+ if ps.isEmpty(a) {
+ continue
+ }
+ if !first {
+ ps.writeString(", ")
+ }
+ ps.print(a)
+ first = false
+ }
+ if ps.last == '>' {
+ // Avoid syntactic ambiguity in old versions of C++.
+ ps.writeByte(' ')
+ }
+ ps.writeByte('>')
+}
+
+func (t *Template) Traverse(fn func(AST) bool) {
+ if fn(t) {
+ t.Name.Traverse(fn)
+ for _, a := range t.Args {
+ a.Traverse(fn)
+ }
+ }
+}
+
+func (t *Template) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(t) {
+ return nil
+ }
+ name := t.Name.Copy(fn, skip)
+ changed := name != nil
+ args := make([]AST, len(t.Args))
+ for i, a := range t.Args {
+ ac := a.Copy(fn, skip)
+ if ac == nil {
+ args[i] = a
+ } else {
+ args[i] = ac
+ changed = true
+ }
+ }
+ if !changed {
+ return fn(t)
+ }
+ if name == nil {
+ name = t.Name
+ }
+ t = &Template{Name: name, Args: args}
+ if r := fn(t); r != nil {
+ return r
+ }
+ return t
+}
+
+func (t *Template) GoString() string {
+ return t.goString(0, "")
+}
+
+func (t *Template) goString(indent int, field string) string {
+ var args string
+ if len(t.Args) == 0 {
+ args = fmt.Sprintf("%*sArgs: nil", indent+2, "")
+ } else {
+ args = fmt.Sprintf("%*sArgs:", indent+2, "")
+ for i, a := range t.Args {
+ args += "\n"
+ args += a.goString(indent+4, fmt.Sprintf("%d: ", i))
+ }
+ }
+ return fmt.Sprintf("%*s%sTemplate (%p):\n%s\n%s", indent, "", field, t,
+ t.Name.goString(indent+2, "Name: "), args)
+}
+
// TemplateParam is a template parameter. The Template field is
// filled in while parsing the demangled string. We don't normally
// see these while printing--they are replaced by the simplify
// function.
type TemplateParam struct {
	Index    int
	Template *Template
}

// print prints the template argument this parameter refers to.
// It panics if the parameter was never attached to a template or
// if the index does not name an argument of that template; both
// indicate a bug in the parser rather than bad input.
func (tp *TemplateParam) print(ps *printState) {
	if tp.Template == nil {
		panic("TemplateParam Template field is nil")
	}
	if tp.Index >= len(tp.Template.Args) {
		panic("TemplateParam Index out of bounds")
	}
	ps.print(tp.Template.Args[tp.Index])
}

func (tp *TemplateParam) Traverse(fn func(AST) bool) {
	fn(tp)
	// Don't traverse Template--it points elsewhere in the AST.
}

func (tp *TemplateParam) Copy(fn func(AST) AST, skip func(AST) bool) AST {
	if skip(tp) {
		return nil
	}
	return fn(tp)
}

func (tp *TemplateParam) GoString() string {
	return tp.goString(0, "")
}

func (tp *TemplateParam) goString(indent int, field string) string {
	return fmt.Sprintf("%*s%sTemplateParam: Template: %p; Index %d", indent, "", field, tp.Template, tp.Index)
}
+
+// LambdaAuto is a lambda auto parameter.
+type LambdaAuto struct {
+ Index int
+}
+
+func (la *LambdaAuto) print(ps *printState) {
+ // We print the index plus 1 because that is what the standard
+ // demangler does.
+ if ps.llvmStyle {
+ ps.writeString("auto")
+ } else {
+ fmt.Fprintf(&ps.buf, "auto:%d", la.Index+1)
+ }
+}
+
+func (la *LambdaAuto) Traverse(fn func(AST) bool) {
+ fn(la)
+}
+
+func (la *LambdaAuto) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(la) {
+ return nil
+ }
+ return fn(la)
+}
+
+func (la *LambdaAuto) GoString() string {
+ return la.goString(0, "")
+}
+
+func (la *LambdaAuto) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sLambdaAuto: Index %d", indent, "", field, la.Index)
+}
+
+// Qualifiers is an ordered list of type qualifiers.
+type Qualifiers struct {
+ Qualifiers []AST
+}
+
+func (qs *Qualifiers) print(ps *printState) {
+ first := true
+ for _, q := range qs.Qualifiers {
+ if !first {
+ ps.writeByte(' ')
+ }
+ q.print(ps)
+ first = false
+ }
+}
+
+func (qs *Qualifiers) Traverse(fn func(AST) bool) {
+ if fn(qs) {
+ for _, q := range qs.Qualifiers {
+ q.Traverse(fn)
+ }
+ }
+}
+
+func (qs *Qualifiers) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(qs) {
+ return nil
+ }
+ changed := false
+ qualifiers := make([]AST, len(qs.Qualifiers))
+ for i, q := range qs.Qualifiers {
+ qc := q.Copy(fn, skip)
+ if qc == nil {
+ qualifiers[i] = q
+ } else {
+ qualifiers[i] = qc
+ changed = true
+ }
+ }
+ if !changed {
+ return fn(qs)
+ }
+ qs = &Qualifiers{Qualifiers: qualifiers}
+ if r := fn(qs); r != nil {
+ return r
+ }
+ return qs
+}
+
+func (qs *Qualifiers) GoString() string {
+ return qs.goString(0, "")
+}
+
+func (qs *Qualifiers) goString(indent int, field string) string {
+ quals := fmt.Sprintf("%*s%s", indent, "", field)
+ for _, q := range qs.Qualifiers {
+ quals += "\n"
+ quals += q.goString(indent+2, "")
+ }
+ return quals
+}
+
+// Qualifier is a single type qualifier.
+type Qualifier struct {
+ Name string // qualifier name: const, volatile, etc.
+ Exprs []AST // can be non-nil for noexcept and throw
+}
+
+func (q *Qualifier) print(ps *printState) {
+ ps.writeString(q.Name)
+ if len(q.Exprs) > 0 {
+ ps.writeByte('(')
+ first := true
+ for _, e := range q.Exprs {
+ if el, ok := e.(*ExprList); ok && len(el.Exprs) == 0 {
+ continue
+ }
+ if !first {
+ ps.writeString(", ")
+ }
+ ps.print(e)
+ first = false
+ }
+ ps.writeByte(')')
+ }
+}
+
+func (q *Qualifier) Traverse(fn func(AST) bool) {
+ if fn(q) {
+ for _, e := range q.Exprs {
+ e.Traverse(fn)
+ }
+ }
+}
+
+func (q *Qualifier) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(q) {
+ return nil
+ }
+ exprs := make([]AST, len(q.Exprs))
+ changed := false
+ for i, e := range q.Exprs {
+ ec := e.Copy(fn, skip)
+ if ec == nil {
+ exprs[i] = e
+ } else {
+ exprs[i] = ec
+ changed = true
+ }
+ }
+ if !changed {
+ return fn(q)
+ }
+ q = &Qualifier{Name: q.Name, Exprs: exprs}
+ if r := fn(q); r != nil {
+ return r
+ }
+ return q
+}
+
+func (q *Qualifier) GoString() string {
+ return q.goString(0, "Qualifier: ")
+}
+
+func (q *Qualifier) goString(indent int, field string) string {
+ qs := fmt.Sprintf("%*s%s%s", indent, "", field, q.Name)
+ if len(q.Exprs) > 0 {
+ for i, e := range q.Exprs {
+ qs += "\n"
+ qs += e.goString(indent+2, fmt.Sprintf("%d: ", i))
+ }
+ }
+ return qs
+}
+
// TypeWithQualifiers is a type with standard qualifiers.
type TypeWithQualifiers struct {
	Base       AST
	Qualifiers AST
}

func (twq *TypeWithQualifiers) print(ps *printState) {
	// Give the base type a chance to print the inner types.
	ps.inner = append(ps.inner, twq)
	ps.print(twq.Base)
	if len(ps.inner) > 0 {
		// The qualifier wasn't printed by Base.
		ps.writeByte(' ')
		ps.print(twq.Qualifiers)
		ps.inner = ps.inner[:len(ps.inner)-1]
	}
}

// Print qualifiers as an inner type by just printing the qualifiers.
func (twq *TypeWithQualifiers) printInner(ps *printState) {
	ps.writeByte(' ')
	ps.print(twq.Qualifiers)
}

// Traverse visits the node and its Base.
// NOTE(review): the Qualifiers field is not traversed here, even
// though Copy below does copy it — confirm this asymmetry is
// intentional.
func (twq *TypeWithQualifiers) Traverse(fn func(AST) bool) {
	if fn(twq) {
		twq.Base.Traverse(fn)
	}
}

// Copy returns a transformed copy of the node, or nil if no copy
// of either child was required.
func (twq *TypeWithQualifiers) Copy(fn func(AST) AST, skip func(AST) bool) AST {
	if skip(twq) {
		return nil
	}
	base := twq.Base.Copy(fn, skip)
	quals := twq.Qualifiers.Copy(fn, skip)
	if base == nil && quals == nil {
		return fn(twq)
	}
	// Fall back to the original child for whichever side was
	// unchanged.
	if base == nil {
		base = twq.Base
	}
	if quals == nil {
		quals = twq.Qualifiers
	}
	twq = &TypeWithQualifiers{Base: base, Qualifiers: quals}
	if r := fn(twq); r != nil {
		return r
	}
	return twq
}

func (twq *TypeWithQualifiers) GoString() string {
	return twq.goString(0, "")
}

func (twq *TypeWithQualifiers) goString(indent int, field string) string {
	return fmt.Sprintf("%*s%sTypeWithQualifiers:\n%s\n%s", indent, "", field,
		twq.Qualifiers.goString(indent+2, "Qualifiers: "),
		twq.Base.goString(indent+2, "Base: "))
}
+
+// MethodWithQualifiers is a method with qualifiers.
+type MethodWithQualifiers struct {
+ Method AST
+ Qualifiers AST
+ RefQualifier string // "" or "&" or "&&"
+}
+
+func (mwq *MethodWithQualifiers) print(ps *printState) {
+ // Give the base type a chance to print the inner types.
+ ps.inner = append(ps.inner, mwq)
+ ps.print(mwq.Method)
+ if len(ps.inner) > 0 {
+ if mwq.Qualifiers != nil {
+ ps.writeByte(' ')
+ ps.print(mwq.Qualifiers)
+ }
+ if mwq.RefQualifier != "" {
+ ps.writeByte(' ')
+ ps.writeString(mwq.RefQualifier)
+ }
+ ps.inner = ps.inner[:len(ps.inner)-1]
+ }
+}
+
+func (mwq *MethodWithQualifiers) printInner(ps *printState) {
+ if mwq.Qualifiers != nil {
+ ps.writeByte(' ')
+ ps.print(mwq.Qualifiers)
+ }
+ if mwq.RefQualifier != "" {
+ ps.writeByte(' ')
+ ps.writeString(mwq.RefQualifier)
+ }
+}
+
+func (mwq *MethodWithQualifiers) Traverse(fn func(AST) bool) {
+ if fn(mwq) {
+ mwq.Method.Traverse(fn)
+ }
+}
+
+func (mwq *MethodWithQualifiers) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(mwq) {
+ return nil
+ }
+ method := mwq.Method.Copy(fn, skip)
+ var quals AST
+ if mwq.Qualifiers != nil {
+ quals = mwq.Qualifiers.Copy(fn, skip)
+ }
+ if method == nil && quals == nil {
+ return fn(mwq)
+ }
+ if method == nil {
+ method = mwq.Method
+ }
+ if quals == nil {
+ quals = mwq.Qualifiers
+ }
+ mwq = &MethodWithQualifiers{Method: method, Qualifiers: quals, RefQualifier: mwq.RefQualifier}
+ if r := fn(mwq); r != nil {
+ return r
+ }
+ return mwq
+}
+
+func (mwq *MethodWithQualifiers) GoString() string {
+ return mwq.goString(0, "")
+}
+
+func (mwq *MethodWithQualifiers) goString(indent int, field string) string {
+ var q string
+ if mwq.Qualifiers != nil {
+ q += "\n" + mwq.Qualifiers.goString(indent+2, "Qualifiers: ")
+ }
+ if mwq.RefQualifier != "" {
+ if q != "" {
+ q += "\n"
+ }
+ q += fmt.Sprintf("%*s%s%s", indent+2, "", "RefQualifier: ", mwq.RefQualifier)
+ }
+ return fmt.Sprintf("%*s%sMethodWithQualifiers:%s\n%s", indent, "", field,
+ q, mwq.Method.goString(indent+2, "Method: "))
+}
+
+// BuiltinType is a builtin type, like "int".
+type BuiltinType struct {
+ Name string
+}
+
+func (bt *BuiltinType) print(ps *printState) {
+ name := bt.Name
+ if ps.llvmStyle && name == "decltype(nullptr)" {
+ name = "std::nullptr_t"
+ }
+ ps.writeString(name)
+}
+
+func (bt *BuiltinType) Traverse(fn func(AST) bool) {
+ fn(bt)
+}
+
+func (bt *BuiltinType) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(bt) {
+ return nil
+ }
+ return fn(bt)
+}
+
+func (bt *BuiltinType) GoString() string {
+ return bt.goString(0, "")
+}
+
+func (bt *BuiltinType) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sBuiltinType: %s", indent, "", field, bt.Name)
+}
+
// printBase is common print code for types that are printed with a
// simple suffix.
func printBase(ps *printState, qual, base AST) {
	// Push qual so base can decide where the suffix belongs.
	ps.inner = append(ps.inner, qual)
	ps.print(base)
	if len(ps.inner) > 0 {
		// base did not consume the inner entry; emit the
		// suffix in the default position and pop it.
		qual.(innerPrinter).printInner(ps)
		ps.inner = ps.inner[:len(ps.inner)-1]
	}
}
+
+// PointerType is a pointer type.
+type PointerType struct {
+ Base AST
+}
+
+func (pt *PointerType) print(ps *printState) {
+ printBase(ps, pt, pt.Base)
+}
+
+func (pt *PointerType) printInner(ps *printState) {
+ ps.writeString("*")
+}
+
+func (pt *PointerType) Traverse(fn func(AST) bool) {
+ if fn(pt) {
+ pt.Base.Traverse(fn)
+ }
+}
+
+func (pt *PointerType) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(pt) {
+ return nil
+ }
+ base := pt.Base.Copy(fn, skip)
+ if base == nil {
+ return fn(pt)
+ }
+ pt = &PointerType{Base: base}
+ if r := fn(pt); r != nil {
+ return r
+ }
+ return pt
+}
+
+func (pt *PointerType) GoString() string {
+ return pt.goString(0, "")
+}
+
+func (pt *PointerType) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sPointerType:\n%s", indent, "", field,
+ pt.Base.goString(indent+2, ""))
+}
+
+// ReferenceType is a reference type.
+type ReferenceType struct {
+ Base AST
+}
+
+func (rt *ReferenceType) print(ps *printState) {
+ printBase(ps, rt, rt.Base)
+}
+
+func (rt *ReferenceType) printInner(ps *printState) {
+ ps.writeString("&")
+}
+
+func (rt *ReferenceType) Traverse(fn func(AST) bool) {
+ if fn(rt) {
+ rt.Base.Traverse(fn)
+ }
+}
+
+func (rt *ReferenceType) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(rt) {
+ return nil
+ }
+ base := rt.Base.Copy(fn, skip)
+ if base == nil {
+ return fn(rt)
+ }
+ rt = &ReferenceType{Base: base}
+ if r := fn(rt); r != nil {
+ return r
+ }
+ return rt
+}
+
+func (rt *ReferenceType) GoString() string {
+ return rt.goString(0, "")
+}
+
+func (rt *ReferenceType) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sReferenceType:\n%s", indent, "", field,
+ rt.Base.goString(indent+2, ""))
+}
+
+// RvalueReferenceType is an rvalue reference type.
+type RvalueReferenceType struct {
+ Base AST
+}
+
+func (rt *RvalueReferenceType) print(ps *printState) {
+ printBase(ps, rt, rt.Base)
+}
+
+func (rt *RvalueReferenceType) printInner(ps *printState) {
+ ps.writeString("&&")
+}
+
+func (rt *RvalueReferenceType) Traverse(fn func(AST) bool) {
+ if fn(rt) {
+ rt.Base.Traverse(fn)
+ }
+}
+
+func (rt *RvalueReferenceType) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(rt) {
+ return nil
+ }
+ base := rt.Base.Copy(fn, skip)
+ if base == nil {
+ return fn(rt)
+ }
+ rt = &RvalueReferenceType{Base: base}
+ if r := fn(rt); r != nil {
+ return r
+ }
+ return rt
+}
+
+func (rt *RvalueReferenceType) GoString() string {
+ return rt.goString(0, "")
+}
+
+func (rt *RvalueReferenceType) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sRvalueReferenceType:\n%s", indent, "", field,
+ rt.Base.goString(indent+2, ""))
+}
+
+// ComplexType is a complex type.
+type ComplexType struct {
+ Base AST
+}
+
+func (ct *ComplexType) print(ps *printState) {
+ printBase(ps, ct, ct.Base)
+}
+
+func (ct *ComplexType) printInner(ps *printState) {
+ ps.writeString(" _Complex")
+}
+
+func (ct *ComplexType) Traverse(fn func(AST) bool) {
+ if fn(ct) {
+ ct.Base.Traverse(fn)
+ }
+}
+
+func (ct *ComplexType) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(ct) {
+ return nil
+ }
+ base := ct.Base.Copy(fn, skip)
+ if base == nil {
+ return fn(ct)
+ }
+ ct = &ComplexType{Base: base}
+ if r := fn(ct); r != nil {
+ return r
+ }
+ return ct
+}
+
+func (ct *ComplexType) GoString() string {
+ return ct.goString(0, "")
+}
+
+func (ct *ComplexType) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sComplexType:\n%s", indent, "", field,
+ ct.Base.goString(indent+2, ""))
+}
+
+// ImaginaryType is an imaginary type.
+type ImaginaryType struct {
+ Base AST
+}
+
+func (it *ImaginaryType) print(ps *printState) {
+ printBase(ps, it, it.Base)
+}
+
+func (it *ImaginaryType) printInner(ps *printState) {
+ ps.writeString(" _Imaginary")
+}
+
+func (it *ImaginaryType) Traverse(fn func(AST) bool) {
+ if fn(it) {
+ it.Base.Traverse(fn)
+ }
+}
+
+func (it *ImaginaryType) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(it) {
+ return nil
+ }
+ base := it.Base.Copy(fn, skip)
+ if base == nil {
+ return fn(it)
+ }
+ it = &ImaginaryType{Base: base}
+ if r := fn(it); r != nil {
+ return r
+ }
+ return it
+}
+
+func (it *ImaginaryType) GoString() string {
+ return it.goString(0, "")
+}
+
+func (it *ImaginaryType) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sImaginaryType:\n%s", indent, "", field,
+ it.Base.goString(indent+2, ""))
+}
+
+// VendorQualifier is a type qualified by a vendor-specific qualifier.
+type VendorQualifier struct {
+ Qualifier AST
+ Type AST
+}
+
+func (vq *VendorQualifier) print(ps *printState) {
+ if ps.llvmStyle {
+ ps.print(vq.Type)
+ vq.printInner(ps)
+ } else {
+ ps.inner = append(ps.inner, vq)
+ ps.print(vq.Type)
+ if len(ps.inner) > 0 {
+ ps.printOneInner(nil)
+ }
+ }
+}
+
+func (vq *VendorQualifier) printInner(ps *printState) {
+ ps.writeByte(' ')
+ ps.print(vq.Qualifier)
+}
+
+func (vq *VendorQualifier) Traverse(fn func(AST) bool) {
+ if fn(vq) {
+ vq.Qualifier.Traverse(fn)
+ vq.Type.Traverse(fn)
+ }
+}
+
+func (vq *VendorQualifier) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(vq) {
+ return nil
+ }
+ qualifier := vq.Qualifier.Copy(fn, skip)
+ typ := vq.Type.Copy(fn, skip)
+ if qualifier == nil && typ == nil {
+ return fn(vq)
+ }
+ if qualifier == nil {
+ qualifier = vq.Qualifier
+ }
+ if typ == nil {
+ typ = vq.Type
+ }
+ vq = &VendorQualifier{Qualifier: qualifier, Type: vq.Type}
+ if r := fn(vq); r != nil {
+ return r
+ }
+ return vq
+}
+
+func (vq *VendorQualifier) GoString() string {
+ return vq.goString(0, "")
+}
+
+func (vq *VendorQualifier) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sVendorQualifier:\n%s\n%s", indent, "", field,
+ vq.Qualifier.goString(indent+2, "Qualifier: "),
+ vq.Type.goString(indent+2, "Type: "))
+}
+
+// ArrayType is an array type.
+type ArrayType struct {
+ Dimension AST
+ Element AST
+}
+
+func (at *ArrayType) print(ps *printState) {
+ // Pass the array type down as an inner type so that we print
+ // multi-dimensional arrays correctly.
+ ps.inner = append(ps.inner, at)
+ ps.print(at.Element)
+ if ln := len(ps.inner); ln > 0 {
+ ps.inner = ps.inner[:ln-1]
+ at.printDimension(ps)
+ }
+}
+
+func (at *ArrayType) printInner(ps *printState) {
+ at.printDimension(ps)
+}
+
+// Print the array dimension.
+func (at *ArrayType) printDimension(ps *printState) {
+ space := " "
+ for len(ps.inner) > 0 {
+ // We haven't gotten to the real type yet. Use
+ // parentheses around that type, except that if it is
+ // an array type we print it as a multi-dimensional
+ // array
+ in := ps.inner[len(ps.inner)-1]
+ if twq, ok := in.(*TypeWithQualifiers); ok {
+ in = twq.Base
+ }
+ if _, ok := in.(*ArrayType); ok {
+ if in == ps.inner[len(ps.inner)-1] {
+ space = ""
+ }
+ ps.printOneInner(nil)
+ } else {
+ ps.writeString(" (")
+ ps.printInner(false)
+ ps.writeByte(')')
+ }
+ }
+ ps.writeString(space)
+ ps.writeByte('[')
+ ps.print(at.Dimension)
+ ps.writeByte(']')
+}
+
+func (at *ArrayType) Traverse(fn func(AST) bool) {
+ if fn(at) {
+ at.Dimension.Traverse(fn)
+ at.Element.Traverse(fn)
+ }
+}
+
+func (at *ArrayType) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(at) {
+ return nil
+ }
+ dimension := at.Dimension.Copy(fn, skip)
+ element := at.Element.Copy(fn, skip)
+ if dimension == nil && element == nil {
+ return fn(at)
+ }
+ if dimension == nil {
+ dimension = at.Dimension
+ }
+ if element == nil {
+ element = at.Element
+ }
+ at = &ArrayType{Dimension: dimension, Element: element}
+ if r := fn(at); r != nil {
+ return r
+ }
+ return at
+}
+
+func (at *ArrayType) GoString() string {
+ return at.goString(0, "")
+}
+
+func (at *ArrayType) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sArrayType:\n%s\n%s", indent, "", field,
+ at.Dimension.goString(indent+2, "Dimension: "),
+ at.Element.goString(indent+2, "Element: "))
+}
+
+// FunctionType is a function type.
+type FunctionType struct {
+ Return AST
+ Args []AST
+
+ // The forLocalName field reports whether this FunctionType
+ // was created for a local name. With the default GNU demangling
+ // output we don't print the return type in that case.
+ ForLocalName bool
+}
+
+func (ft *FunctionType) print(ps *printState) {
+ retType := ft.Return
+ if ft.ForLocalName && (!ps.enclosingParams || !ps.llvmStyle) {
+ retType = nil
+ }
+ if retType != nil {
+ // Pass the return type as an inner type in order to
+ // print the arguments in the right location.
+ ps.inner = append(ps.inner, ft)
+ ps.print(retType)
+ if len(ps.inner) == 0 {
+ // Everything was printed.
+ return
+ }
+ ps.inner = ps.inner[:len(ps.inner)-1]
+ ps.writeByte(' ')
+ }
+ ft.printArgs(ps)
+}
+
+func (ft *FunctionType) printInner(ps *printState) {
+ ft.printArgs(ps)
+}
+
+// printArgs prints the arguments of a function type. It looks at the
+// inner types for spacing.
+func (ft *FunctionType) printArgs(ps *printState) {
+ paren := false
+ space := false
+ for i := len(ps.inner) - 1; i >= 0; i-- {
+ switch ps.inner[i].(type) {
+ case *PointerType, *ReferenceType, *RvalueReferenceType:
+ paren = true
+ case *TypeWithQualifiers, *ComplexType, *ImaginaryType, *PtrMem:
+ space = true
+ paren = true
+ }
+ if paren {
+ break
+ }
+ }
+
+ if paren {
+ if !space && (ps.last != '(' && ps.last != '*') {
+ space = true
+ }
+ if space && ps.last != ' ' {
+ ps.writeByte(' ')
+ }
+ ps.writeByte('(')
+ }
+
+ save := ps.printInner(true)
+
+ if paren {
+ ps.writeByte(')')
+ }
+
+ ps.writeByte('(')
+ if !ft.ForLocalName || ps.enclosingParams {
+ first := true
+ for _, a := range ft.Args {
+ if ps.isEmpty(a) {
+ continue
+ }
+ if !first {
+ ps.writeString(", ")
+ }
+ ps.print(a)
+ first = false
+ }
+ }
+ ps.writeByte(')')
+
+ ps.inner = save
+ ps.printInner(false)
+}
+
+func (ft *FunctionType) Traverse(fn func(AST) bool) {
+ if fn(ft) {
+ if ft.Return != nil {
+ ft.Return.Traverse(fn)
+ }
+ for _, a := range ft.Args {
+ a.Traverse(fn)
+ }
+ }
+}
+
+func (ft *FunctionType) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(ft) {
+ return nil
+ }
+ changed := false
+ var ret AST
+ if ft.Return != nil {
+ ret = ft.Return.Copy(fn, skip)
+ if ret == nil {
+ ret = ft.Return
+ } else {
+ changed = true
+ }
+ }
+ args := make([]AST, len(ft.Args))
+ for i, a := range ft.Args {
+ ac := a.Copy(fn, skip)
+ if ac == nil {
+ args[i] = a
+ } else {
+ args[i] = ac
+ changed = true
+ }
+ }
+ if !changed {
+ return fn(ft)
+ }
+ ft = &FunctionType{
+ Return: ret,
+ Args: args,
+ ForLocalName: ft.ForLocalName,
+ }
+ if r := fn(ft); r != nil {
+ return r
+ }
+ return ft
+}
+
+func (ft *FunctionType) GoString() string {
+ return ft.goString(0, "")
+}
+
+func (ft *FunctionType) goString(indent int, field string) string {
+ var forLocalName string
+ if ft.ForLocalName {
+ forLocalName = " ForLocalName: true"
+ }
+ var r string
+ if ft.Return == nil {
+ r = fmt.Sprintf("%*sReturn: nil", indent+2, "")
+ } else {
+ r = ft.Return.goString(indent+2, "Return: ")
+ }
+ var args string
+ if len(ft.Args) == 0 {
+ args = fmt.Sprintf("%*sArgs: nil", indent+2, "")
+ } else {
+ args = fmt.Sprintf("%*sArgs:", indent+2, "")
+ for i, a := range ft.Args {
+ args += "\n"
+ args += a.goString(indent+4, fmt.Sprintf("%d: ", i))
+ }
+ }
+ return fmt.Sprintf("%*s%sFunctionType:%s\n%s\n%s", indent, "", field,
+ forLocalName, r, args)
+}
+
+// FunctionParam is a parameter of a function, used for last-specified
+// return type in a closure.
+type FunctionParam struct {
+ Index int
+}
+
+func (fp *FunctionParam) print(ps *printState) {
+ if fp.Index == 0 {
+ ps.writeString("this")
+ } else if ps.llvmStyle {
+ if fp.Index == 1 {
+ ps.writeString("fp")
+ } else {
+ fmt.Fprintf(&ps.buf, "fp%d", fp.Index-2)
+ }
+ } else {
+ fmt.Fprintf(&ps.buf, "{parm#%d}", fp.Index)
+ }
+}
+
+func (fp *FunctionParam) Traverse(fn func(AST) bool) {
+ fn(fp)
+}
+
+func (fp *FunctionParam) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(fp) {
+ return nil
+ }
+ return fn(fp)
+}
+
+func (fp *FunctionParam) GoString() string {
+ return fp.goString(0, "")
+}
+
+func (fp *FunctionParam) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sFunctionParam: %d", indent, "", field, fp.Index)
+}
+
+// PtrMem is a pointer-to-member expression.
+type PtrMem struct {
+ Class AST
+ Member AST
+}
+
+func (pm *PtrMem) print(ps *printState) {
+ ps.inner = append(ps.inner, pm)
+ ps.print(pm.Member)
+ if len(ps.inner) > 0 {
+ ps.printOneInner(nil)
+ }
+}
+
+func (pm *PtrMem) printInner(ps *printState) {
+ if ps.last != '(' {
+ ps.writeByte(' ')
+ }
+ ps.print(pm.Class)
+ ps.writeString("::*")
+}
+
+func (pm *PtrMem) Traverse(fn func(AST) bool) {
+ if fn(pm) {
+ pm.Class.Traverse(fn)
+ pm.Member.Traverse(fn)
+ }
+}
+
+func (pm *PtrMem) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(pm) {
+ return nil
+ }
+ class := pm.Class.Copy(fn, skip)
+ member := pm.Member.Copy(fn, skip)
+ if class == nil && member == nil {
+ return fn(pm)
+ }
+ if class == nil {
+ class = pm.Class
+ }
+ if member == nil {
+ member = pm.Member
+ }
+ pm = &PtrMem{Class: class, Member: member}
+ if r := fn(pm); r != nil {
+ return r
+ }
+ return pm
+}
+
+func (pm *PtrMem) GoString() string {
+ return pm.goString(0, "")
+}
+
+func (pm *PtrMem) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sPtrMem:\n%s\n%s", indent, "", field,
+ pm.Class.goString(indent+2, "Class: "),
+ pm.Member.goString(indent+2, "Member: "))
+}
+
+// FixedType is a fixed numeric type of unknown size.
+type FixedType struct {
+ Base AST
+ Accum bool
+ Sat bool
+}
+
+func (ft *FixedType) print(ps *printState) {
+ if ft.Sat {
+ ps.writeString("_Sat ")
+ }
+ if bt, ok := ft.Base.(*BuiltinType); ok && bt.Name == "int" {
+ // The standard demangler skips printing "int".
+ } else {
+ ps.print(ft.Base)
+ ps.writeByte(' ')
+ }
+ if ft.Accum {
+ ps.writeString("_Accum")
+ } else {
+ ps.writeString("_Fract")
+ }
+}
+
+func (ft *FixedType) Traverse(fn func(AST) bool) {
+ if fn(ft) {
+ ft.Base.Traverse(fn)
+ }
+}
+
+func (ft *FixedType) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(ft) {
+ return nil
+ }
+ base := ft.Base.Copy(fn, skip)
+ if base == nil {
+ return fn(ft)
+ }
+ ft = &FixedType{Base: base, Accum: ft.Accum, Sat: ft.Sat}
+ if r := fn(ft); r != nil {
+ return r
+ }
+ return ft
+}
+
+func (ft *FixedType) GoString() string {
+ return ft.goString(0, "")
+}
+
+func (ft *FixedType) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sFixedType: Accum: %t; Sat: %t\n%s", indent, "", field,
+ ft.Accum, ft.Sat,
+ ft.Base.goString(indent+2, "Base: "))
+}
+
+// BinaryFP is a binary floating-point type.
+type BinaryFP struct {
+ Bits int
+}
+
+func (bfp *BinaryFP) print(ps *printState) {
+ fmt.Fprintf(&ps.buf, "_Float%d", bfp.Bits)
+}
+
+func (bfp *BinaryFP) Traverse(fn func(AST) bool) {
+ fn(bfp)
+}
+
+func (bfp *BinaryFP) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(bfp) {
+ return nil
+ }
+ return fn(bfp)
+}
+
+func (bfp *BinaryFP) GoString() string {
+ return bfp.goString(0, "")
+}
+
+func (bfp *BinaryFP) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sBinaryFP: %d", indent, "", field, bfp.Bits)
+}
+
+// VectorType is a vector type.
+type VectorType struct {
+ Dimension AST
+ Base AST
+}
+
+func (vt *VectorType) print(ps *printState) {
+ ps.inner = append(ps.inner, vt)
+ ps.print(vt.Base)
+ if len(ps.inner) > 0 {
+ ps.printOneInner(nil)
+ }
+}
+
+func (vt *VectorType) printInner(ps *printState) {
+ end := byte(')')
+ if ps.llvmStyle {
+ ps.writeString(" vector[")
+ end = ']'
+ } else {
+ ps.writeString(" __vector(")
+ }
+ ps.print(vt.Dimension)
+ ps.writeByte(end)
+}
+
+func (vt *VectorType) Traverse(fn func(AST) bool) {
+ if fn(vt) {
+ vt.Dimension.Traverse(fn)
+ vt.Base.Traverse(fn)
+ }
+}
+
+func (vt *VectorType) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(vt) {
+ return nil
+ }
+ dimension := vt.Dimension.Copy(fn, skip)
+ base := vt.Base.Copy(fn, skip)
+ if dimension == nil && base == nil {
+ return fn(vt)
+ }
+ if dimension == nil {
+ dimension = vt.Dimension
+ }
+ if base == nil {
+ base = vt.Base
+ }
+ vt = &VectorType{Dimension: dimension, Base: base}
+ if r := fn(vt); r != nil {
+ return r
+ }
+ return vt
+}
+
+func (vt *VectorType) GoString() string {
+ return vt.goString(0, "")
+}
+
+func (vt *VectorType) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sVectorType:\n%s\n%s", indent, "", field,
+ vt.Dimension.goString(indent+2, "Dimension: "),
+ vt.Base.goString(indent+2, "Base: "))
+}
+
+// ElaboratedType is an elaborated struct/union/enum type.
+type ElaboratedType struct {
+ Kind string
+ Type AST
+}
+
+func (et *ElaboratedType) print(ps *printState) {
+ ps.writeString(et.Kind)
+ ps.writeString(" ")
+ et.Type.print(ps)
+}
+
+func (et *ElaboratedType) Traverse(fn func(AST) bool) {
+ if fn(et) {
+ et.Type.Traverse(fn)
+ }
+}
+
+func (et *ElaboratedType) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(et) {
+ return nil
+ }
+ typ := et.Type.Copy(fn, skip)
+ if typ == nil {
+ return fn(et)
+ }
+ et = &ElaboratedType{Kind: et.Kind, Type: typ}
+ if r := fn(et); r != nil {
+ return r
+ }
+ return et
+}
+
+func (et *ElaboratedType) GoString() string {
+ return et.goString(0, "")
+}
+
+func (et *ElaboratedType) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sElaboratedtype: Kind: %s\n%s", indent, "", field,
+ et.Kind, et.Type.goString(indent+2, "Expr: "))
+}
+
+// Decltype is the decltype operator.
+type Decltype struct {
+ Expr AST
+}
+
+func (dt *Decltype) print(ps *printState) {
+ ps.writeString("decltype")
+ if !ps.llvmStyle {
+ ps.writeString(" ")
+ }
+ ps.writeString("(")
+ ps.print(dt.Expr)
+ ps.writeByte(')')
+}
+
+func (dt *Decltype) Traverse(fn func(AST) bool) {
+ if fn(dt) {
+ dt.Expr.Traverse(fn)
+ }
+}
+
+func (dt *Decltype) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(dt) {
+ return nil
+ }
+ expr := dt.Expr.Copy(fn, skip)
+ if expr == nil {
+ return fn(dt)
+ }
+ dt = &Decltype{Expr: expr}
+ if r := fn(dt); r != nil {
+ return r
+ }
+ return dt
+}
+
+func (dt *Decltype) GoString() string {
+ return dt.goString(0, "")
+}
+
+func (dt *Decltype) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sDecltype:\n%s", indent, "", field,
+ dt.Expr.goString(indent+2, "Expr: "))
+}
+
+// Operator is an operator.
+type Operator struct {
+ Name string
+}
+
+func (op *Operator) print(ps *printState) {
+ ps.writeString("operator")
+ if isLower(op.Name[0]) {
+ ps.writeByte(' ')
+ }
+ n := op.Name
+ n = strings.TrimSuffix(n, " ")
+ ps.writeString(n)
+}
+
+func (op *Operator) Traverse(fn func(AST) bool) {
+ fn(op)
+}
+
+func (op *Operator) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(op) {
+ return nil
+ }
+ return fn(op)
+}
+
+func (op *Operator) GoString() string {
+ return op.goString(0, "")
+}
+
+func (op *Operator) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sOperator: %s", indent, "", field, op.Name)
+}
+
+// Constructor is a constructor.
+type Constructor struct {
+ Name AST
+ Base AST // base class of inheriting constructor
+}
+
+func (c *Constructor) print(ps *printState) {
+ ps.print(c.Name)
+ // We don't include the base class in the demangled string.
+}
+
+func (c *Constructor) Traverse(fn func(AST) bool) {
+ if fn(c) {
+ c.Name.Traverse(fn)
+ if c.Base != nil {
+ c.Base.Traverse(fn)
+ }
+ }
+}
+
+func (c *Constructor) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(c) {
+ return nil
+ }
+ name := c.Name.Copy(fn, skip)
+ var base AST
+ if c.Base != nil {
+ base = c.Base.Copy(fn, skip)
+ }
+ if name == nil && base == nil {
+ return fn(c)
+ }
+ if name == nil {
+ name = c.Name
+ }
+ if base == nil {
+ base = c.Base
+ }
+ c = &Constructor{Name: name, Base: base}
+ if r := fn(c); r != nil {
+ return r
+ }
+ return c
+}
+
+func (c *Constructor) GoString() string {
+ return c.goString(0, "")
+}
+
+func (c *Constructor) goString(indent int, field string) string {
+ var sb strings.Builder
+ fmt.Fprintf(&sb, "%*s%sConstructor:\n", indent, "", field)
+ if c.Base != nil {
+ fmt.Fprintf(&sb, "%s\n", c.Base.goString(indent+2, "Base: "))
+ }
+ fmt.Fprintf(&sb, "%s", c.Name.goString(indent+2, "Name: "))
+ return sb.String()
+}
+
+// Destructor is a destructor.
+type Destructor struct {
+ Name AST
+}
+
+func (d *Destructor) print(ps *printState) {
+ ps.writeByte('~')
+ ps.print(d.Name)
+}
+
+func (d *Destructor) Traverse(fn func(AST) bool) {
+ if fn(d) {
+ d.Name.Traverse(fn)
+ }
+}
+
+func (d *Destructor) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(d) {
+ return nil
+ }
+ name := d.Name.Copy(fn, skip)
+ if name == nil {
+ return fn(d)
+ }
+ d = &Destructor{Name: name}
+ if r := fn(d); r != nil {
+ return r
+ }
+ return d
+}
+
+func (d *Destructor) GoString() string {
+ return d.goString(0, "")
+}
+
+func (d *Destructor) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sDestructor:\n%s", indent, "", field, d.Name.goString(indent+2, "Name: "))
+}
+
+// GlobalCDtor is a global constructor or destructor.
+type GlobalCDtor struct {
+ Ctor bool
+ Key AST
+}
+
+func (gcd *GlobalCDtor) print(ps *printState) {
+ ps.writeString("global ")
+ if gcd.Ctor {
+ ps.writeString("constructors")
+ } else {
+ ps.writeString("destructors")
+ }
+ ps.writeString(" keyed to ")
+ ps.print(gcd.Key)
+}
+
+func (gcd *GlobalCDtor) Traverse(fn func(AST) bool) {
+ if fn(gcd) {
+ gcd.Key.Traverse(fn)
+ }
+}
+
+func (gcd *GlobalCDtor) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(gcd) {
+ return nil
+ }
+ key := gcd.Key.Copy(fn, skip)
+ if key == nil {
+ return fn(gcd)
+ }
+ gcd = &GlobalCDtor{Ctor: gcd.Ctor, Key: key}
+ if r := fn(gcd); r != nil {
+ return r
+ }
+ return gcd
+}
+
+func (gcd *GlobalCDtor) GoString() string {
+ return gcd.goString(0, "")
+}
+
+func (gcd *GlobalCDtor) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sGlobalCDtor: Ctor: %t\n%s", indent, "", field,
+ gcd.Ctor, gcd.Key.goString(indent+2, "Key: "))
+}
+
+// TaggedName is a name with an ABI tag.
+type TaggedName struct {
+ Name AST
+ Tag AST
+}
+
+func (t *TaggedName) print(ps *printState) {
+ ps.print(t.Name)
+ ps.writeString("[abi:")
+ ps.print(t.Tag)
+ ps.writeByte(']')
+}
+
+func (t *TaggedName) Traverse(fn func(AST) bool) {
+ if fn(t) {
+ t.Name.Traverse(fn)
+ t.Tag.Traverse(fn)
+ }
+}
+
+func (t *TaggedName) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(t) {
+ return nil
+ }
+ name := t.Name.Copy(fn, skip)
+ tag := t.Tag.Copy(fn, skip)
+ if name == nil && tag == nil {
+ return fn(t)
+ }
+ if name == nil {
+ name = t.Name
+ }
+ if tag == nil {
+ tag = t.Tag
+ }
+ t = &TaggedName{Name: name, Tag: tag}
+ if r := fn(t); r != nil {
+ return r
+ }
+ return t
+}
+
+func (t *TaggedName) GoString() string {
+ return t.goString(0, "")
+}
+
+func (t *TaggedName) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sTaggedName:\n%s\n%s", indent, "", field,
+ t.Name.goString(indent+2, "Name: "),
+ t.Tag.goString(indent+2, "Tag: "))
+}
+
// PackExpansion is a pack expansion. The Pack field may be nil.
type PackExpansion struct {
	Base AST
	Pack *ArgumentPack
}

// print writes the expansion. When Pack is nil the pack could not be
// resolved, so the base is printed with a trailing "..." (except in
// LLVM style); otherwise the base alone carries the expanded form.
func (pe *PackExpansion) print(ps *printState) {
	// We normally only get here if the simplify function was
	// unable to locate and expand the pack.
	if pe.Pack == nil {
		if ps.llvmStyle {
			ps.print(pe.Base)
		} else {
			parenthesize(ps, pe.Base)
			ps.writeString("...")
		}
	} else {
		ps.print(pe.Base)
	}
}

// Traverse visits this node and the base.
func (pe *PackExpansion) Traverse(fn func(AST) bool) {
	if fn(pe) {
		pe.Base.Traverse(fn)
		// Don't traverse Pack--it points elsewhere in the AST.
	}
}

// Copy rebuilds the node if the base changed. Pack is shared, not
// copied, because it points elsewhere in the AST.
func (pe *PackExpansion) Copy(fn func(AST) AST, skip func(AST) bool) AST {
	if skip(pe) {
		return nil
	}
	base := pe.Base.Copy(fn, skip)
	if base == nil {
		return fn(pe)
	}
	pe = &PackExpansion{Base: base, Pack: pe.Pack}
	if r := fn(pe); r != nil {
		return r
	}
	return pe
}

// GoString returns a debug representation of the node.
func (pe *PackExpansion) GoString() string {
	return pe.goString(0, "")
}

func (pe *PackExpansion) goString(indent int, field string) string {
	return fmt.Sprintf("%*s%sPackExpansion: Pack: %p\n%s", indent, "", field,
		pe.Pack, pe.Base.goString(indent+2, "Base: "))
}
+
+// ArgumentPack is an argument pack.
+type ArgumentPack struct {
+ Args []AST
+}
+
+func (ap *ArgumentPack) print(ps *printState) {
+ for i, a := range ap.Args {
+ if i > 0 {
+ ps.writeString(", ")
+ }
+ ps.print(a)
+ }
+}
+
+func (ap *ArgumentPack) Traverse(fn func(AST) bool) {
+ if fn(ap) {
+ for _, a := range ap.Args {
+ a.Traverse(fn)
+ }
+ }
+}
+
+func (ap *ArgumentPack) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(ap) {
+ return nil
+ }
+ args := make([]AST, len(ap.Args))
+ changed := false
+ for i, a := range ap.Args {
+ ac := a.Copy(fn, skip)
+ if ac == nil {
+ args[i] = a
+ } else {
+ args[i] = ac
+ changed = true
+ }
+ }
+ if !changed {
+ return fn(ap)
+ }
+ ap = &ArgumentPack{Args: args}
+ if r := fn(ap); r != nil {
+ return r
+ }
+ return ap
+}
+
+func (ap *ArgumentPack) GoString() string {
+ return ap.goString(0, "")
+}
+
+func (ap *ArgumentPack) goString(indent int, field string) string {
+ if len(ap.Args) == 0 {
+ return fmt.Sprintf("%*s%sArgumentPack: nil", indent, "", field)
+ }
+ s := fmt.Sprintf("%*s%sArgumentPack:", indent, "", field)
+ for i, a := range ap.Args {
+ s += "\n"
+ s += a.goString(indent+2, fmt.Sprintf("%d: ", i))
+ }
+ return s
+}
+
+// SizeofPack is the sizeof operator applied to an argument pack.
+type SizeofPack struct {
+ Pack *ArgumentPack
+}
+
+func (sp *SizeofPack) print(ps *printState) {
+ if ps.llvmStyle {
+ ps.writeString("sizeof...(")
+ ps.print(sp.Pack)
+ ps.writeByte(')')
+ } else {
+ ps.writeString(fmt.Sprintf("%d", len(sp.Pack.Args)))
+ }
+}
+
+func (sp *SizeofPack) Traverse(fn func(AST) bool) {
+ fn(sp)
+ // Don't traverse the pack--it points elsewhere in the AST.
+}
+
+func (sp *SizeofPack) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(sp) {
+ return nil
+ }
+ sp = &SizeofPack{Pack: sp.Pack}
+ if r := fn(sp); r != nil {
+ return r
+ }
+ return sp
+}
+
+func (sp *SizeofPack) GoString() string {
+ return sp.goString(0, "")
+}
+
+func (sp *SizeofPack) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sSizeofPack: Pack: %p", indent, "", field, sp.Pack)
+}
+
+// SizeofArgs is the size of a captured template parameter pack from
+// an alias template.
+type SizeofArgs struct {
+ Args []AST
+}
+
+func (sa *SizeofArgs) print(ps *printState) {
+ c := 0
+ for _, a := range sa.Args {
+ if ap, ok := a.(*ArgumentPack); ok {
+ c += len(ap.Args)
+ } else if el, ok := a.(*ExprList); ok {
+ c += len(el.Exprs)
+ } else {
+ c++
+ }
+ }
+ ps.writeString(fmt.Sprintf("%d", c))
+}
+
+func (sa *SizeofArgs) Traverse(fn func(AST) bool) {
+ if fn(sa) {
+ for _, a := range sa.Args {
+ a.Traverse(fn)
+ }
+ }
+}
+
+func (sa *SizeofArgs) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(sa) {
+ return nil
+ }
+ changed := false
+ args := make([]AST, len(sa.Args))
+ for i, a := range sa.Args {
+ ac := a.Copy(fn, skip)
+ if ac == nil {
+ args[i] = a
+ } else {
+ args[i] = ac
+ changed = true
+ }
+ }
+ if !changed {
+ return fn(sa)
+ }
+ sa = &SizeofArgs{Args: args}
+ if r := fn(sa); r != nil {
+ return r
+ }
+ return sa
+}
+
+func (sa *SizeofArgs) GoString() string {
+ return sa.goString(0, "")
+}
+
+func (sa *SizeofArgs) goString(indent int, field string) string {
+ var args string
+ if len(sa.Args) == 0 {
+ args = fmt.Sprintf("%*sArgs: nil", indent+2, "")
+ } else {
+ args = fmt.Sprintf("%*sArgs:", indent+2, "")
+ for i, a := range sa.Args {
+ args += "\n"
+ args += a.goString(indent+4, fmt.Sprintf("%d: ", i))
+ }
+ }
+ return fmt.Sprintf("%*s%sSizeofArgs:\n%s", indent, "", field, args)
+}
+
+// TemplateParamName is the name of a template parameter that the
+// demangler introduced for a lambda that has explicit template
+// parameters. This is a prefix with an index.
+type TemplateParamName struct {
+ Prefix string
+ Index int
+}
+
+func (tpn *TemplateParamName) print(ps *printState) {
+ ps.writeString(tpn.Prefix)
+ if tpn.Index > 0 {
+ ps.writeString(fmt.Sprintf("%d", tpn.Index-1))
+ }
+}
+
+func (tpn *TemplateParamName) Traverse(fn func(AST) bool) {
+ fn(tpn)
+}
+
+func (tpn *TemplateParamName) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(tpn) {
+ return nil
+ }
+ return fn(tpn)
+}
+
+func (tpn *TemplateParamName) GoString() string {
+ return tpn.goString(0, "")
+}
+
+func (tpn *TemplateParamName) goString(indent int, field string) string {
+ name := tpn.Prefix
+ if tpn.Index > 0 {
+ name += fmt.Sprintf("%d", tpn.Index-1)
+ }
+ return fmt.Sprintf("%*s%sTemplateParamName: %s", indent, "", field, name)
+}
+
+// TypeTemplateParam is a type template parameter that appears in a
+// lambda with explicit template parameters.
+type TypeTemplateParam struct {
+ Name AST
+}
+
+func (ttp *TypeTemplateParam) print(ps *printState) {
+ ps.writeString("typename ")
+ ps.printInner(false)
+ ps.print(ttp.Name)
+}
+
+func (ttp *TypeTemplateParam) Traverse(fn func(AST) bool) {
+ if fn(ttp) {
+ ttp.Name.Traverse(fn)
+ }
+}
+
+func (ttp *TypeTemplateParam) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(ttp) {
+ return nil
+ }
+ name := ttp.Name.Copy(fn, skip)
+ if name == nil {
+ return fn(ttp)
+ }
+ ttp = &TypeTemplateParam{Name: name}
+ if r := fn(ttp); r != nil {
+ return r
+ }
+ return ttp
+}
+
+func (ttp *TypeTemplateParam) GoString() string {
+ return ttp.goString(0, "")
+}
+
+func (ttp *TypeTemplateParam) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sTypeTemplateParam:\n%s", indent, "", field,
+ ttp.Name.goString(indent+2, "Name"))
+}
+
+// NonTypeTemplateParam is a non-type template parameter that appears
+// in a lambda with explicit template parameters.
+type NonTypeTemplateParam struct {
+ Name AST
+ Type AST
+}
+
+func (nttp *NonTypeTemplateParam) print(ps *printState) {
+ ps.inner = append(ps.inner, nttp)
+ ps.print(nttp.Type)
+ if len(ps.inner) > 0 {
+ ps.writeByte(' ')
+ ps.print(nttp.Name)
+ ps.inner = ps.inner[:len(ps.inner)-1]
+ }
+}
+
+func (nttp *NonTypeTemplateParam) printInner(ps *printState) {
+ ps.print(nttp.Name)
+}
+
+func (nttp *NonTypeTemplateParam) Traverse(fn func(AST) bool) {
+ if fn(nttp) {
+ nttp.Name.Traverse(fn)
+ nttp.Type.Traverse(fn)
+ }
+}
+
+func (nttp *NonTypeTemplateParam) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(nttp) {
+ return nil
+ }
+ name := nttp.Name.Copy(fn, skip)
+ typ := nttp.Type.Copy(fn, skip)
+ if name == nil && typ == nil {
+ return fn(nttp)
+ }
+ if name == nil {
+ name = nttp.Name
+ }
+ if typ == nil {
+ typ = nttp.Type
+ }
+ nttp = &NonTypeTemplateParam{Name: name, Type: typ}
+ if r := fn(nttp); r != nil {
+ return r
+ }
+ return nttp
+}
+
+func (nttp *NonTypeTemplateParam) GoString() string {
+ return nttp.goString(0, "")
+}
+
+func (nttp *NonTypeTemplateParam) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sNonTypeTemplateParam:\n%s\n%s", indent, "", field,
+ nttp.Name.goString(indent+2, "Name: "),
+ nttp.Type.goString(indent+2, "Type: "))
+}
+
+// TemplateTemplateParam is a template template parameter that appears
+// in a lambda with explicit template parameters.
+type TemplateTemplateParam struct {
+ Name AST
+ Params []AST
+}
+
+func (ttp *TemplateTemplateParam) print(ps *printState) {
+ ps.writeString("template<")
+ for i, param := range ttp.Params {
+ if i > 0 {
+ ps.writeString(", ")
+ }
+ ps.print(param)
+ }
+ ps.writeString("> typename ")
+ ps.print(ttp.Name)
+}
+
+func (ttp *TemplateTemplateParam) Traverse(fn func(AST) bool) {
+ if fn(ttp) {
+ ttp.Name.Traverse(fn)
+ for _, param := range ttp.Params {
+ param.Traverse(fn)
+ }
+ }
+}
+
+func (ttp *TemplateTemplateParam) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(ttp) {
+ return nil
+ }
+
+ changed := false
+
+ name := ttp.Name.Copy(fn, skip)
+ if name == nil {
+ name = ttp.Name
+ } else {
+ changed = true
+ }
+
+ params := make([]AST, len(ttp.Params))
+ for i, p := range ttp.Params {
+ pc := p.Copy(fn, skip)
+ if pc == nil {
+ params[i] = p
+ } else {
+ params[i] = pc
+ changed = true
+ }
+ }
+
+ if !changed {
+ return fn(ttp)
+ }
+
+ ttp = &TemplateTemplateParam{
+ Name: name,
+ Params: params,
+ }
+ if r := fn(ttp); r != nil {
+ return r
+ }
+ return ttp
+}
+
+func (ttp *TemplateTemplateParam) GoString() string {
+ return ttp.goString(0, "")
+}
+
+func (ttp *TemplateTemplateParam) goString(indent int, field string) string {
+ var params strings.Builder
+ fmt.Fprintf(&params, "%*sParams:", indent+2, "")
+ for i, p := range ttp.Params {
+ params.WriteByte('\n')
+ params.WriteString(p.goString(indent+4, fmt.Sprintf("%d: ", i)))
+ }
+ return fmt.Sprintf("%*s%sTemplateTemplateParam:\n%s\n%s", indent, "", field,
+ ttp.Name.goString(indent+2, "Name: "),
+ params.String())
+}
+
// TemplateParamPack is a template parameter pack that appears in a
// lambda with explicit template parameters.
type TemplateParamPack struct {
	Param AST
}

// print writes the parameter followed by "...". For a non-type
// parameter only its type is printed here; the name is handled by
// printInner via the temporarily replaced inner stack.
func (tpp *TemplateParamPack) print(ps *printState) {
	// Save and restore the inner stack: we replace it with just this
	// node so that printing the parameter consumes us (via printInner)
	// at the correct position.
	holdInner := ps.inner
	defer func() { ps.inner = holdInner }()

	ps.inner = []AST{tpp}
	if nttp, ok := tpp.Param.(*NonTypeTemplateParam); ok {
		ps.print(nttp.Type)
	} else {
		ps.print(tpp.Param)
	}
	if len(ps.inner) > 0 {
		// printInner was never invoked; emit the "..." ourselves.
		ps.writeString("...")
	}
}

// printInner writes "..." and, for a non-type parameter, its name.
func (tpp *TemplateParamPack) printInner(ps *printState) {
	ps.writeString("...")
	if nttp, ok := tpp.Param.(*NonTypeTemplateParam); ok {
		ps.print(nttp.Name)
	}
}

// Traverse visits this node and the parameter.
func (tpp *TemplateParamPack) Traverse(fn func(AST) bool) {
	if fn(tpp) {
		tpp.Param.Traverse(fn)
	}
}

// Copy rebuilds the node if the parameter changed.
func (tpp *TemplateParamPack) Copy(fn func(AST) AST, skip func(AST) bool) AST {
	if skip(tpp) {
		return nil
	}
	param := tpp.Param.Copy(fn, skip)
	if param == nil {
		return fn(tpp)
	}
	tpp = &TemplateParamPack{Param: param}
	if r := fn(tpp); r != nil {
		return r
	}
	return tpp
}

// GoString returns a debug representation of the node.
func (tpp *TemplateParamPack) GoString() string {
	return tpp.goString(0, "")
}

func (tpp *TemplateParamPack) goString(indent int, field string) string {
	return fmt.Sprintf("%*s%sTemplateParamPack:\n%s", indent, "", field,
		tpp.Param.goString(indent+2, "Param: "))
}
+
+// Cast is a type cast.
+type Cast struct {
+ To AST
+}
+
+func (c *Cast) print(ps *printState) {
+ ps.writeString("operator ")
+ ps.print(c.To)
+}
+
+func (c *Cast) Traverse(fn func(AST) bool) {
+ if fn(c) {
+ c.To.Traverse(fn)
+ }
+}
+
+func (c *Cast) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(c) {
+ return nil
+ }
+ to := c.To.Copy(fn, skip)
+ if to == nil {
+ return fn(c)
+ }
+ c = &Cast{To: to}
+ if r := fn(c); r != nil {
+ return r
+ }
+ return c
+}
+
+func (c *Cast) GoString() string {
+ return c.goString(0, "")
+}
+
+func (c *Cast) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sCast\n%s", indent, "", field,
+ c.To.goString(indent+2, "To: "))
+}
+
+// The parenthesize function prints the string for val, wrapped in
+// parentheses if necessary.
+func parenthesize(ps *printState, val AST) {
+ paren := false
+ switch v := val.(type) {
+ case *Name, *InitializerList:
+ case *FunctionParam:
+ if ps.llvmStyle {
+ paren = true
+ }
+ case *Qualified:
+ if v.LocalName {
+ paren = true
+ }
+ default:
+ paren = true
+ }
+ if paren {
+ ps.writeByte('(')
+ }
+ ps.print(val)
+ if paren {
+ ps.writeByte(')')
+ }
+}
+
+// Nullary is an operator in an expression with no arguments, such as
+// throw.
+type Nullary struct {
+ Op AST
+}
+
+func (n *Nullary) print(ps *printState) {
+ if op, ok := n.Op.(*Operator); ok {
+ ps.writeString(op.Name)
+ } else {
+ ps.print(n.Op)
+ }
+}
+
+func (n *Nullary) Traverse(fn func(AST) bool) {
+ if fn(n) {
+ n.Op.Traverse(fn)
+ }
+}
+
+func (n *Nullary) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(n) {
+ return nil
+ }
+ op := n.Op.Copy(fn, skip)
+ if op == nil {
+ return fn(n)
+ }
+ n = &Nullary{Op: op}
+ if r := fn(n); r != nil {
+ return r
+ }
+ return n
+}
+
+func (n *Nullary) GoString() string {
+ return n.goString(0, "")
+}
+
+func (n *Nullary) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sNullary:\n%s", indent, "", field,
+ n.Op.goString(indent+2, "Op: "))
+}
+
// Unary is a unary operation in an expression.
type Unary struct {
	Op         AST
	Expr       AST
	Suffix     bool // true for ++ -- when used as postfix
	SizeofType bool // true for sizeof (type)
}

// print writes the operator and its operand, parenthesizing per the
// operator kind and the demangling style.
func (u *Unary) print(ps *printState) {
	op, _ := u.Op.(*Operator)
	expr := u.Expr

	// Don't print the argument list when taking the address of a
	// function.
	if !ps.llvmStyle {
		if op != nil && op.Name == "&" {
			if t, ok := expr.(*Typed); ok {
				if _, ok := t.Type.(*FunctionType); ok {
					expr = t.Name
				}
			}
		}
	}

	// Postfix operators print the operand first.
	if u.Suffix {
		parenthesize(ps, expr)
	}

	if op != nil {
		ps.writeString(op.Name)
		if ps.llvmStyle && op.Name == "noexcept" {
			ps.writeByte(' ')
		}
	} else if c, ok := u.Op.(*Cast); ok {
		// A cast operator prints as "(type)".
		ps.writeByte('(')
		ps.print(c.To)
		ps.writeByte(')')
	} else {
		ps.print(u.Op)
	}

	if !u.Suffix {
		isDelete := op != nil && (op.Name == "delete " || op.Name == "delete[] ")
		if op != nil && op.Name == "::" {
			// Don't use parentheses after ::.
			ps.print(expr)
		} else if u.SizeofType {
			// Always use parentheses for sizeof argument.
			ps.writeByte('(')
			ps.print(expr)
			ps.writeByte(')')
		} else if op != nil && op.Name == "__alignof__" {
			// Always use parentheses for __alignof__ argument.
			ps.writeByte('(')
			ps.print(expr)
			ps.writeByte(')')
		} else if ps.llvmStyle {
			// LLVM style parenthesizes everything except
			// user-defined literals and delete expressions.
			if op == nil || (op.Name != `operator"" ` && !isDelete) {
				ps.writeByte('(')
			}
			ps.print(expr)
			if op == nil || (op.Name != `operator"" ` && !isDelete) {
				ps.writeByte(')')
			}
		} else {
			parenthesize(ps, expr)
		}
	}
}

// Traverse visits this node, the operator, and the operand.
func (u *Unary) Traverse(fn func(AST) bool) {
	if fn(u) {
		u.Op.Traverse(fn)
		u.Expr.Traverse(fn)
	}
}

// Copy rebuilds the node if the operator or operand changed.
func (u *Unary) Copy(fn func(AST) AST, skip func(AST) bool) AST {
	if skip(u) {
		return nil
	}
	op := u.Op.Copy(fn, skip)
	expr := u.Expr.Copy(fn, skip)
	if op == nil && expr == nil {
		return fn(u)
	}
	if op == nil {
		op = u.Op
	}
	if expr == nil {
		expr = u.Expr
	}
	u = &Unary{Op: op, Expr: expr, Suffix: u.Suffix, SizeofType: u.SizeofType}
	if r := fn(u); r != nil {
		return r
	}
	return u
}

// GoString returns a debug representation of the node.
func (u *Unary) GoString() string {
	return u.goString(0, "")
}

func (u *Unary) goString(indent int, field string) string {
	var s string
	if u.Suffix {
		s = " Suffix: true"
	}
	if u.SizeofType {
		s += " SizeofType: true"
	}
	return fmt.Sprintf("%*s%sUnary:%s\n%s\n%s", indent, "", field,
		s, u.Op.goString(indent+2, "Op: "),
		u.Expr.goString(indent+2, "Expr: "))
}
+
+// isDesignatedInitializer reports whether x is a designated
+// initializer.
+func isDesignatedInitializer(x AST) bool {
+ switch x := x.(type) {
+ case *Binary:
+ if op, ok := x.Op.(*Operator); ok {
+ if op.Name == "]=" {
+ return true
+ }
+ if op.Name != "=" {
+ return false
+ }
+ if _, ok := x.Left.(*Literal); ok {
+ return false
+ }
+ return true
+ }
+ case *Trinary:
+ if op, ok := x.Op.(*Operator); ok {
+ return op.Name == "[...]="
+ }
+ }
+ return false
+}
+
// Binary is a binary operation in an expression.
type Binary struct {
	Op    AST
	Left  AST
	Right AST
}

// print writes the binary expression, with special handling for
// casts, designated initializers, calls, subscripts, and member
// access, and style-dependent parenthesization.
func (b *Binary) print(ps *printState) {
	op, _ := b.Op.(*Operator)

	// C++-style casts print as op<Left>(Right).
	if op != nil && strings.Contains(op.Name, "cast") {
		ps.writeString(op.Name)
		ps.writeByte('<')
		ps.print(b.Left)
		ps.writeString(">(")
		ps.print(b.Right)
		ps.writeByte(')')
		return
	}

	// Designated initializers print as .field=value or [index]=value.
	if isDesignatedInitializer(b) {
		if op.Name == "=" {
			ps.writeByte('.')
		} else {
			ps.writeByte('[')
		}
		ps.print(b.Left)
		if op.Name == "]=" {
			ps.writeByte(']')
		}
		if isDesignatedInitializer(b.Right) {
			// Don't add anything between designated
			// initializer chains.
			ps.print(b.Right)
		} else {
			if ps.llvmStyle {
				ps.writeString(" = ")
				ps.print(b.Right)
			} else {
				ps.writeByte('=')
				parenthesize(ps, b.Right)
			}
		}
		return
	}

	// Use an extra set of parentheses around an expression that
	// uses the greater-than operator, so that it does not get
	// confused with the '>' that ends template parameters.
	if op != nil && op.Name == ">" {
		ps.writeByte('(')
	}

	left := b.Left

	skipParens := false
	skipBothParens := false
	addSpaces := ps.llvmStyle
	if ps.llvmStyle && op != nil {
		switch op.Name {
		case ".", "->":
			skipBothParens = true
			addSpaces = false
		case "->*":
			skipParens = true
			addSpaces = false
		}
	}

	// For a function call in an expression, don't print the types
	// of the arguments unless there is a return type.
	if op != nil && op.Name == "()" {
		if ty, ok := b.Left.(*Typed); ok {
			if ft, ok := ty.Type.(*FunctionType); ok {
				if ft.Return == nil {
					left = ty.Name
				} else {
					skipParens = true
				}
			} else {
				left = ty.Name
			}
		}
		if ps.llvmStyle {
			skipParens = true
		}
	}

	if skipParens || skipBothParens {
		ps.print(left)
	} else if ps.llvmStyle {
		ps.writeByte('(')
		ps.print(left)
		ps.writeByte(')')
	} else {
		parenthesize(ps, left)
	}

	// Subscript prints as left[right].
	if op != nil && op.Name == "[]" {
		ps.writeByte('[')
		ps.print(b.Right)
		ps.writeByte(']')
		return
	}

	if op != nil {
		if op.Name != "()" {
			if addSpaces {
				ps.writeByte(' ')
			}
			ps.writeString(op.Name)
			if addSpaces {
				ps.writeByte(' ')
			}
		}
	} else {
		ps.print(b.Op)
	}

	if skipBothParens {
		ps.print(b.Right)
	} else if ps.llvmStyle {
		ps.writeByte('(')
		ps.print(b.Right)
		ps.writeByte(')')
	} else {
		parenthesize(ps, b.Right)
	}

	// Close the extra parenthesis opened for the ">" operator.
	if op != nil && op.Name == ">" {
		ps.writeByte(')')
	}
}

// Traverse visits this node, the operator, and both operands.
func (b *Binary) Traverse(fn func(AST) bool) {
	if fn(b) {
		b.Op.Traverse(fn)
		b.Left.Traverse(fn)
		b.Right.Traverse(fn)
	}
}

// Copy rebuilds the node if the operator or either operand changed.
func (b *Binary) Copy(fn func(AST) AST, skip func(AST) bool) AST {
	if skip(b) {
		return nil
	}
	op := b.Op.Copy(fn, skip)
	left := b.Left.Copy(fn, skip)
	right := b.Right.Copy(fn, skip)
	if op == nil && left == nil && right == nil {
		return fn(b)
	}
	if op == nil {
		op = b.Op
	}
	if left == nil {
		left = b.Left
	}
	if right == nil {
		right = b.Right
	}
	b = &Binary{Op: op, Left: left, Right: right}
	if r := fn(b); r != nil {
		return r
	}
	return b
}

// GoString returns a debug representation of the node.
func (b *Binary) GoString() string {
	return b.goString(0, "")
}

func (b *Binary) goString(indent int, field string) string {
	return fmt.Sprintf("%*s%sBinary:\n%s\n%s\n%s", indent, "", field,
		b.Op.goString(indent+2, "Op: "),
		b.Left.goString(indent+2, "Left: "),
		b.Right.goString(indent+2, "Right: "))
}
+
// Trinary is the ?: trinary operation in an expression.
type Trinary struct {
	Op     AST
	First  AST
	Second AST
	Third  AST
}

// print writes either a range designated initializer
// ([first ... second]=third) or a conditional expression.
func (t *Trinary) print(ps *printState) {
	if isDesignatedInitializer(t) {
		ps.writeByte('[')
		ps.print(t.First)
		ps.writeString(" ... ")
		ps.print(t.Second)
		ps.writeByte(']')
		if isDesignatedInitializer(t.Third) {
			// Don't add anything between designated
			// initializer chains.
			ps.print(t.Third)
		} else {
			if ps.llvmStyle {
				ps.writeString(" = ")
				ps.print(t.Third)
			} else {
				ps.writeByte('=')
				parenthesize(ps, t.Third)
			}
		}
		return
	}

	// Conditional expression: first?second : third.
	parenthesize(ps, t.First)
	if ps.llvmStyle {
		ps.writeString(" ? ")
	} else {
		ps.writeByte('?')
	}
	parenthesize(ps, t.Second)
	ps.writeString(" : ")
	parenthesize(ps, t.Third)
}

// Traverse visits this node, the operator, and all three operands.
func (t *Trinary) Traverse(fn func(AST) bool) {
	if fn(t) {
		t.Op.Traverse(fn)
		t.First.Traverse(fn)
		t.Second.Traverse(fn)
		t.Third.Traverse(fn)
	}
}

// Copy rebuilds the node if the operator or any operand changed.
func (t *Trinary) Copy(fn func(AST) AST, skip func(AST) bool) AST {
	if skip(t) {
		return nil
	}
	op := t.Op.Copy(fn, skip)
	first := t.First.Copy(fn, skip)
	second := t.Second.Copy(fn, skip)
	third := t.Third.Copy(fn, skip)
	if op == nil && first == nil && second == nil && third == nil {
		return fn(t)
	}
	if op == nil {
		op = t.Op
	}
	if first == nil {
		first = t.First
	}
	if second == nil {
		second = t.Second
	}
	if third == nil {
		third = t.Third
	}
	t = &Trinary{Op: op, First: first, Second: second, Third: third}
	if r := fn(t); r != nil {
		return r
	}
	return t
}

// GoString returns a debug representation of the node.
func (t *Trinary) GoString() string {
	return t.goString(0, "")
}

func (t *Trinary) goString(indent int, field string) string {
	return fmt.Sprintf("%*s%sTrinary:\n%s\n%s\n%s\n%s", indent, "", field,
		t.Op.goString(indent+2, "Op: "),
		t.First.goString(indent+2, "First: "),
		t.Second.goString(indent+2, "Second: "),
		t.Third.goString(indent+2, "Third: "))
}
+
// Fold is a C++17 fold-expression. Arg2 is nil for a unary operator.
type Fold struct {
	Left bool
	Op   AST
	Arg1 AST
	Arg2 AST
}

// print writes the fold expression: unary left "(... op arg)", unary
// right "(arg op ...)", or binary "(arg1 op ... op arg2)".
func (f *Fold) print(ps *printState) {
	op, _ := f.Op.(*Operator)
	// printOp writes the operator, spaced in LLVM style.
	printOp := func() {
		if op != nil {
			if ps.llvmStyle {
				ps.writeByte(' ')
			}
			ps.writeString(op.Name)
			if ps.llvmStyle {
				ps.writeByte(' ')
			}
		} else {
			ps.print(f.Op)
		}
	}
	// foldParenthesize wraps an argument in parentheses except in
	// LLVM style (argument packs are always wrapped).
	foldParenthesize := func(a AST) {
		if _, ok := a.(*ArgumentPack); ok || !ps.llvmStyle {
			parenthesize(ps, a)
		} else {
			ps.print(a)
		}
	}

	if f.Arg2 == nil {
		if f.Left {
			ps.writeString("(...")
			printOp()
			foldParenthesize(f.Arg1)
			ps.writeString(")")
		} else {
			ps.writeString("(")
			foldParenthesize(f.Arg1)
			printOp()
			ps.writeString("...)")
		}
	} else {
		ps.writeString("(")
		foldParenthesize(f.Arg1)
		printOp()
		ps.writeString("...")
		printOp()
		foldParenthesize(f.Arg2)
		ps.writeString(")")
	}
}

// Traverse visits this node, the operator, and the arguments.
func (f *Fold) Traverse(fn func(AST) bool) {
	if fn(f) {
		f.Op.Traverse(fn)
		f.Arg1.Traverse(fn)
		if f.Arg2 != nil {
			f.Arg2.Traverse(fn)
		}
	}
}

// Copy rebuilds the node if the operator or either argument changed.
func (f *Fold) Copy(fn func(AST) AST, skip func(AST) bool) AST {
	if skip(f) {
		return nil
	}
	op := f.Op.Copy(fn, skip)
	arg1 := f.Arg1.Copy(fn, skip)
	var arg2 AST
	if f.Arg2 != nil {
		arg2 = f.Arg2.Copy(fn, skip)
	}
	if op == nil && arg1 == nil && arg2 == nil {
		return fn(f)
	}
	if op == nil {
		op = f.Op
	}
	if arg1 == nil {
		arg1 = f.Arg1
	}
	if arg2 == nil {
		arg2 = f.Arg2
	}
	f = &Fold{Left: f.Left, Op: op, Arg1: arg1, Arg2: arg2}
	if r := fn(f); r != nil {
		return r
	}
	return f
}

// GoString returns a debug representation of the node.
func (f *Fold) GoString() string {
	return f.goString(0, "")
}

func (f *Fold) goString(indent int, field string) string {
	if f.Arg2 == nil {
		return fmt.Sprintf("%*s%sFold: Left: %t\n%s\n%s", indent, "", field,
			f.Left, f.Op.goString(indent+2, "Op: "),
			f.Arg1.goString(indent+2, "Arg1: "))
	} else {
		return fmt.Sprintf("%*s%sFold: Left: %t\n%s\n%s\n%s", indent, "", field,
			f.Left, f.Op.goString(indent+2, "Op: "),
			f.Arg1.goString(indent+2, "Arg1: "),
			f.Arg2.goString(indent+2, "Arg2: "))
	}
}
+
// Subobject is a reference to an offset in an expression. This is
// used for C++20 manglings of class types used as the type of
// non-type template arguments.
//
// See https://github.com/itanium-cxx-abi/cxx-abi/issues/47.
type Subobject struct {
	Type      AST // type of the subobject
	SubExpr   AST // the expression being referenced into
	Offset    int // offset within SubExpr; printed as "at offset N"
	Selectors []int
	PastEnd   bool // presumably a one-past-the-end reference; see the ABI issue above
}
+
+func (so *Subobject) print(ps *printState) {
+ ps.print(so.SubExpr)
+ ps.writeString(".<")
+ ps.print(so.Type)
+ ps.writeString(fmt.Sprintf(" at offset %d>", so.Offset))
+}
+
+func (so *Subobject) Traverse(fn func(AST) bool) {
+ if fn(so) {
+ so.Type.Traverse(fn)
+ so.SubExpr.Traverse(fn)
+ }
+}
+
+func (so *Subobject) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(so) {
+ return nil
+ }
+ typ := so.Type.Copy(fn, skip)
+ subExpr := so.SubExpr.Copy(fn, skip)
+ if typ == nil && subExpr == nil {
+ return nil
+ }
+ if typ == nil {
+ typ = so.Type
+ }
+ if subExpr == nil {
+ subExpr = so.SubExpr
+ }
+ so = &Subobject{
+ Type: typ,
+ SubExpr: subExpr,
+ Offset: so.Offset,
+ Selectors: so.Selectors,
+ PastEnd: so.PastEnd,
+ }
+ if r := fn(so); r != nil {
+ return r
+ }
+ return so
+}
+
+func (so *Subobject) GoString() string {
+ return so.goString(0, "")
+}
+
+func (so *Subobject) goString(indent int, field string) string {
+ var selectors string
+ for _, s := range so.Selectors {
+ selectors += fmt.Sprintf(" %d", s)
+ }
+ return fmt.Sprintf("%*s%sSubobject:\n%s\n%s\n%*sOffset: %d\n%*sSelectors:%s\n%*sPastEnd: %t",
+ indent, "", field,
+ so.Type.goString(indent+2, "Type: "),
+ so.SubExpr.goString(indent+2, "SubExpr: "),
+ indent+2, "", so.Offset,
+ indent+2, "", selectors,
+ indent+2, "", so.PastEnd)
+}
+
+// PtrMemCast is a conversion of an expression to a pointer-to-member
+// type. This is used for C++20 manglings of class types used as the
+// type of non-type template arguments.
+//
+// See https://github.com/itanium-cxx-abi/cxx-abi/issues/47.
+type PtrMemCast struct {
+ Type AST
+ Expr AST
+ Offset int
+}
+
+func (pmc *PtrMemCast) print(ps *printState) {
+ ps.writeString("(")
+ ps.print(pmc.Type)
+ ps.writeString(")(")
+ ps.print(pmc.Expr)
+ ps.writeString(")")
+}
+
+func (pmc *PtrMemCast) Traverse(fn func(AST) bool) {
+ if fn(pmc) {
+ pmc.Type.Traverse(fn)
+ pmc.Expr.Traverse(fn)
+ }
+}
+
+func (pmc *PtrMemCast) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(pmc) {
+ return nil
+ }
+ typ := pmc.Type.Copy(fn, skip)
+ expr := pmc.Expr.Copy(fn, skip)
+ if typ == nil && expr == nil {
+ return nil
+ }
+ if typ == nil {
+ typ = pmc.Type
+ }
+ if expr == nil {
+ expr = pmc.Expr
+ }
+ pmc = &PtrMemCast{
+ Type: typ,
+ Expr: expr,
+ Offset: pmc.Offset,
+ }
+ if r := fn(pmc); r != nil {
+ return r
+ }
+ return pmc
+}
+
+func (pmc *PtrMemCast) GoString() string {
+ return pmc.goString(0, "")
+}
+
+func (pmc *PtrMemCast) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sPtrMemCast:\n%s\n%s\n%*sOffset: %d",
+ indent, "", field,
+ pmc.Type.goString(indent+2, "Type: "),
+ pmc.Expr.goString(indent+2, "Expr: "),
+ indent+2, "", pmc.Offset)
+}
+
+// New is a use of operator new in an expression.
+type New struct {
+ Op AST
+ Place AST
+ Type AST
+ Init AST
+}
+
// print prints a new-expression. The default style always prints
// "new" regardless of which operator was mangled; LLVM style prints
// the actual operator name from n.Op.
func (n *New) print(ps *printState) {
	if !ps.llvmStyle {
		// Op doesn't really matter for printing--we always print "new".
		ps.writeString("new ")
	} else {
		op, _ := n.Op.(*Operator)
		if op != nil {
			ps.writeString(op.Name)
			if n.Place == nil {
				// No placement arguments: separate the operator
				// from the type with a space.
				ps.writeByte(' ')
			}
		} else {
			ps.print(n.Op)
		}
	}
	if n.Place != nil {
		// Placement arguments, printed in parentheses before the type.
		parenthesize(ps, n.Place)
		ps.writeByte(' ')
	}
	ps.print(n.Type)
	if n.Init != nil {
		// Initializer expression, printed in parentheses after the type.
		parenthesize(ps, n.Init)
	}
}
+
+func (n *New) Traverse(fn func(AST) bool) {
+ if fn(n) {
+ n.Op.Traverse(fn)
+ if n.Place != nil {
+ n.Place.Traverse(fn)
+ }
+ n.Type.Traverse(fn)
+ if n.Init != nil {
+ n.Init.Traverse(fn)
+ }
+ }
+}
+
+func (n *New) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(n) {
+ return nil
+ }
+ op := n.Op.Copy(fn, skip)
+ var place AST
+ if n.Place != nil {
+ place = n.Place.Copy(fn, skip)
+ }
+ typ := n.Type.Copy(fn, skip)
+ var ini AST
+ if n.Init != nil {
+ ini = n.Init.Copy(fn, skip)
+ }
+ if op == nil && place == nil && typ == nil && ini == nil {
+ return fn(n)
+ }
+ if op == nil {
+ op = n.Op
+ }
+ if place == nil {
+ place = n.Place
+ }
+ if typ == nil {
+ typ = n.Type
+ }
+ if ini == nil {
+ ini = n.Init
+ }
+ n = &New{Op: op, Place: place, Type: typ, Init: ini}
+ if r := fn(n); r != nil {
+ return r
+ }
+ return n
+}
+
+func (n *New) GoString() string {
+ return n.goString(0, "")
+}
+
+func (n *New) goString(indent int, field string) string {
+ var place string
+ if n.Place == nil {
+ place = fmt.Sprintf("%*sPlace: nil", indent, "")
+ } else {
+ place = n.Place.goString(indent+2, "Place: ")
+ }
+ var ini string
+ if n.Init == nil {
+ ini = fmt.Sprintf("%*sInit: nil", indent, "")
+ } else {
+ ini = n.Init.goString(indent+2, "Init: ")
+ }
+ return fmt.Sprintf("%*s%sNew:\n%s\n%s\n%s\n%s", indent, "", field,
+ n.Op.goString(indent+2, "Op: "), place,
+ n.Type.goString(indent+2, "Type: "), ini)
+}
+
+// Literal is a literal in an expression.
+type Literal struct {
+ Type AST
+ Val string
+ Neg bool
+}
+
+// Suffixes to use for constants of the given integer type.
+var builtinTypeSuffix = map[string]string{
+ "int": "",
+ "unsigned int": "u",
+ "long": "l",
+ "unsigned long": "ul",
+ "long long": "ll",
+ "unsigned long long": "ull",
+}
+
+// Builtin float types.
+var builtinTypeFloat = map[string]bool{
+ "double": true,
+ "long double": true,
+ "float": true,
+ "__float128": true,
+ "half": true,
+}
+
// print prints a literal value. Integer literals of the builtin types
// listed in builtinTypeSuffix print as the value followed by the
// conventional suffix ("u", "l", "ull", ...). bool values 0 and 1
// print as "false" and "true". An empty decltype(nullptr) literal
// prints as the type (or "nullptr" in LLVM style). Everything else
// prints in cast form, "(type)value", with floating-point values
// additionally wrapped in square brackets.
func (l *Literal) print(ps *printState) {
	isFloat := false
	if b, ok := l.Type.(*BuiltinType); ok {
		if suffix, ok := builtinTypeSuffix[b.Name]; ok {
			// Known integer type: value plus suffix, no cast.
			if l.Neg {
				ps.writeByte('-')
			}
			ps.writeString(l.Val)
			ps.writeString(suffix)
			return
		} else if b.Name == "bool" && !l.Neg {
			switch l.Val {
			case "0":
				ps.writeString("false")
				return
			case "1":
				ps.writeString("true")
				return
			}
		} else if b.Name == "decltype(nullptr)" && l.Val == "" {
			if ps.llvmStyle {
				ps.writeString("nullptr")
			} else {
				ps.print(l.Type)
			}
			return
		} else {
			isFloat = builtinTypeFloat[b.Name]
		}
	}

	// General case: cast form "(type)value".
	ps.writeByte('(')
	ps.print(l.Type)
	ps.writeByte(')')

	if isFloat {
		// Floating-point literal values are wrapped in brackets.
		ps.writeByte('[')
	}
	if l.Neg {
		ps.writeByte('-')
	}
	ps.writeString(l.Val)
	if isFloat {
		ps.writeByte(']')
	}
}
+
+func (l *Literal) Traverse(fn func(AST) bool) {
+ if fn(l) {
+ l.Type.Traverse(fn)
+ }
+}
+
+func (l *Literal) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(l) {
+ return nil
+ }
+ typ := l.Type.Copy(fn, skip)
+ if typ == nil {
+ return fn(l)
+ }
+ l = &Literal{Type: typ, Val: l.Val, Neg: l.Neg}
+ if r := fn(l); r != nil {
+ return r
+ }
+ return l
+}
+
+func (l *Literal) GoString() string {
+ return l.goString(0, "")
+}
+
+func (l *Literal) goString(indent int, field string) string {
+ var neg string
+ if l.Neg {
+ neg = " Neg: true"
+ }
+ return fmt.Sprintf("%*s%sLiteral:%s\n%s\n%*sVal: %s", indent, "", field,
+ neg, l.Type.goString(indent+2, "Type: "),
+ indent+2, "", l.Val)
+}
+
+// StringLiteral is a string literal.
+type StringLiteral struct {
+ Type AST
+}
+
+func (sl *StringLiteral) print(ps *printState) {
+ ps.writeString(`"<`)
+ sl.Type.print(ps)
+ ps.writeString(`>"`)
+}
+
+func (sl *StringLiteral) Traverse(fn func(AST) bool) {
+ if fn(sl) {
+ sl.Type.Traverse(fn)
+ }
+}
+
+func (sl *StringLiteral) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(sl) {
+ return nil
+ }
+ typ := sl.Type.Copy(fn, skip)
+ if typ == nil {
+ return fn(sl)
+ }
+ sl = &StringLiteral{Type: typ}
+ if r := fn(sl); r != nil {
+ return r
+ }
+ return sl
+}
+
+func (sl *StringLiteral) GoString() string {
+ return sl.goString(0, "")
+}
+
+func (sl *StringLiteral) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sStringLiteral:\n%s", indent, "", field,
+ sl.Type.goString(indent+2, ""))
+}
+
+// LambdaExpr is a literal that is a lambda expression.
+type LambdaExpr struct {
+ Type AST
+}
+
+func (le *LambdaExpr) print(ps *printState) {
+ ps.writeString("[]")
+ if cl, ok := le.Type.(*Closure); ok {
+ cl.printTypes(ps)
+ }
+ ps.writeString("{...}")
+}
+
+func (le *LambdaExpr) Traverse(fn func(AST) bool) {
+ if fn(le) {
+ le.Type.Traverse(fn)
+ }
+}
+
+func (le *LambdaExpr) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(le) {
+ return nil
+ }
+ typ := le.Type.Copy(fn, skip)
+ if typ == nil {
+ return fn(le)
+ }
+ le = &LambdaExpr{Type: typ}
+ if r := fn(le); r != nil {
+ return r
+ }
+ return le
+}
+
+func (le *LambdaExpr) GoString() string {
+ return le.goString(0, "")
+}
+
+func (le *LambdaExpr) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sLambdaExpr:\n%s", indent, "", field,
+ le.Type.goString(indent+2, ""))
+}
+
+// ExprList is a list of expressions, typically arguments to a
+// function call in an expression.
+type ExprList struct {
+ Exprs []AST
+}
+
+func (el *ExprList) print(ps *printState) {
+ for i, e := range el.Exprs {
+ if i > 0 {
+ ps.writeString(", ")
+ }
+ ps.print(e)
+ }
+}
+
+func (el *ExprList) Traverse(fn func(AST) bool) {
+ if fn(el) {
+ for _, e := range el.Exprs {
+ e.Traverse(fn)
+ }
+ }
+}
+
+func (el *ExprList) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(el) {
+ return nil
+ }
+ exprs := make([]AST, len(el.Exprs))
+ changed := false
+ for i, e := range el.Exprs {
+ ec := e.Copy(fn, skip)
+ if ec == nil {
+ exprs[i] = e
+ } else {
+ exprs[i] = ec
+ changed = true
+ }
+ }
+ if !changed {
+ return fn(el)
+ }
+ el = &ExprList{Exprs: exprs}
+ if r := fn(el); r != nil {
+ return r
+ }
+ return el
+}
+
+func (el *ExprList) GoString() string {
+ return el.goString(0, "")
+}
+
+func (el *ExprList) goString(indent int, field string) string {
+ if len(el.Exprs) == 0 {
+ return fmt.Sprintf("%*s%sExprList: nil", indent, "", field)
+ }
+ s := fmt.Sprintf("%*s%sExprList:", indent, "", field)
+ for i, e := range el.Exprs {
+ s += "\n"
+ s += e.goString(indent+2, fmt.Sprintf("%d: ", i))
+ }
+ return s
+}
+
+// InitializerList is an initializer list: an optional type with a
+// list of expressions.
+type InitializerList struct {
+ Type AST
+ Exprs AST
+}
+
+func (il *InitializerList) print(ps *printState) {
+ if il.Type != nil {
+ ps.print(il.Type)
+ }
+ ps.writeByte('{')
+ ps.print(il.Exprs)
+ ps.writeByte('}')
+}
+
+func (il *InitializerList) Traverse(fn func(AST) bool) {
+ if fn(il) {
+ if il.Type != nil {
+ il.Type.Traverse(fn)
+ }
+ il.Exprs.Traverse(fn)
+ }
+}
+
+func (il *InitializerList) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(il) {
+ return nil
+ }
+ var typ AST
+ if il.Type != nil {
+ typ = il.Type.Copy(fn, skip)
+ }
+ exprs := il.Exprs.Copy(fn, skip)
+ if typ == nil && exprs == nil {
+ return fn(il)
+ }
+ if typ == nil {
+ typ = il.Type
+ }
+ if exprs == nil {
+ exprs = il.Exprs
+ }
+ il = &InitializerList{Type: typ, Exprs: exprs}
+ if r := fn(il); r != nil {
+ return r
+ }
+ return il
+}
+
+func (il *InitializerList) GoString() string {
+ return il.goString(0, "")
+}
+
+func (il *InitializerList) goString(indent int, field string) string {
+ var t string
+ if il.Type == nil {
+ t = fmt.Sprintf("%*sType: nil", indent+2, "")
+ } else {
+ t = il.Type.goString(indent+2, "Type: ")
+ }
+ return fmt.Sprintf("%*s%sInitializerList:\n%s\n%s", indent, "", field,
+ t, il.Exprs.goString(indent+2, "Exprs: "))
+}
+
+// DefaultArg holds a default argument for a local name.
+type DefaultArg struct {
+ Num int
+ Arg AST
+}
+
+func (da *DefaultArg) print(ps *printState) {
+ if !ps.llvmStyle {
+ fmt.Fprintf(&ps.buf, "{default arg#%d}::", da.Num+1)
+ }
+ ps.print(da.Arg)
+}
+
+func (da *DefaultArg) Traverse(fn func(AST) bool) {
+ if fn(da) {
+ da.Arg.Traverse(fn)
+ }
+}
+
+func (da *DefaultArg) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(da) {
+ return nil
+ }
+ arg := da.Arg.Copy(fn, skip)
+ if arg == nil {
+ return fn(da)
+ }
+ da = &DefaultArg{Num: da.Num, Arg: arg}
+ if r := fn(da); r != nil {
+ return r
+ }
+ return da
+}
+
+func (da *DefaultArg) GoString() string {
+ return da.goString(0, "")
+}
+
+func (da *DefaultArg) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sDefaultArg: Num: %d\n%s", indent, "", field, da.Num,
+ da.Arg.goString(indent+2, "Arg: "))
+}
+
+// Closure is a closure, or lambda expression.
+type Closure struct {
+ TemplateArgs []AST
+ Types []AST
+ Num int
+}
+
+func (cl *Closure) print(ps *printState) {
+ if ps.llvmStyle {
+ if cl.Num == 0 {
+ ps.writeString("'lambda'")
+ } else {
+ ps.writeString(fmt.Sprintf("'lambda%d'", cl.Num-1))
+ }
+ } else {
+ ps.writeString("{lambda")
+ }
+ cl.printTypes(ps)
+ if !ps.llvmStyle {
+ ps.writeString(fmt.Sprintf("#%d}", cl.Num+1))
+ }
+}
+
+func (cl *Closure) printTypes(ps *printState) {
+ if len(cl.TemplateArgs) > 0 {
+ ps.writeString("<")
+ for i, a := range cl.TemplateArgs {
+ if i > 0 {
+ ps.writeString(", ")
+ }
+ ps.print(a)
+ }
+ ps.writeString(">")
+ }
+ ps.writeString("(")
+ for i, t := range cl.Types {
+ if i > 0 {
+ ps.writeString(", ")
+ }
+ ps.print(t)
+ }
+ ps.writeString(")")
+}
+
+func (cl *Closure) Traverse(fn func(AST) bool) {
+ if fn(cl) {
+ for _, a := range cl.TemplateArgs {
+ a.Traverse(fn)
+ }
+ for _, t := range cl.Types {
+ t.Traverse(fn)
+ }
+ }
+}
+
+func (cl *Closure) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(cl) {
+ return nil
+ }
+ changed := false
+
+ args := make([]AST, len(cl.TemplateArgs))
+ for i, a := range cl.TemplateArgs {
+ ac := a.Copy(fn, skip)
+ if ac == nil {
+ args[i] = a
+ } else {
+ args[i] = ac
+ changed = true
+ }
+ }
+
+ types := make([]AST, len(cl.Types))
+ for i, t := range cl.Types {
+ tc := t.Copy(fn, skip)
+ if tc == nil {
+ types[i] = t
+ } else {
+ types[i] = tc
+ changed = true
+ }
+ }
+
+ if !changed {
+ return fn(cl)
+ }
+ cl = &Closure{TemplateArgs: args, Types: types, Num: cl.Num}
+ if r := fn(cl); r != nil {
+ return r
+ }
+ return cl
+}
+
+func (cl *Closure) GoString() string {
+ return cl.goString(0, "")
+}
+
+func (cl *Closure) goString(indent int, field string) string {
+ var args string
+ if len(cl.TemplateArgs) == 0 {
+ args = fmt.Sprintf("%*sTemplateArgs: nil", indent+2, "")
+ } else {
+ args = fmt.Sprintf("%*sTemplateArgs:", indent+2, "")
+ for i, a := range cl.TemplateArgs {
+ args += "\n"
+ args += a.goString(indent+4, fmt.Sprintf("%d: ", i))
+ }
+ }
+ var types string
+ if len(cl.Types) == 0 {
+ types = fmt.Sprintf("%*sTypes: nil", indent+2, "")
+ } else {
+ types = fmt.Sprintf("%*sTypes:", indent+2, "")
+ for i, t := range cl.Types {
+ types += "\n"
+ types += t.goString(indent+4, fmt.Sprintf("%d: ", i))
+ }
+ }
+ return fmt.Sprintf("%*s%sClosure: Num: %d\n%s\n%s", indent, "", field,
+ cl.Num, args, types)
+}
+
+// StructuredBindings is a structured binding declaration.
+type StructuredBindings struct {
+ Bindings []AST
+}
+
// print prints a structured binding declaration as "[a, b, c]".
func (sb *StructuredBindings) print(ps *printState) {
	ps.writeString("[")
	for i, b := range sb.Bindings {
		if i > 0 {
			ps.writeString(", ")
		}
		// NOTE(review): this calls b.print directly instead of
		// ps.print(b), bypassing whatever bookkeeping ps.print
		// performs — presumably intentional; confirm.
		b.print(ps)
	}
	ps.writeString("]")
}
+
+func (sb *StructuredBindings) Traverse(fn func(AST) bool) {
+ if fn(sb) {
+ for _, b := range sb.Bindings {
+ b.Traverse(fn)
+ }
+ }
+}
+
+func (sb *StructuredBindings) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(sb) {
+ return nil
+ }
+ changed := false
+ bindings := make([]AST, len(sb.Bindings))
+ for i, b := range sb.Bindings {
+ bc := b.Copy(fn, skip)
+ if bc == nil {
+ bindings[i] = b
+ } else {
+ bindings[i] = bc
+ changed = true
+ }
+ }
+ if !changed {
+ return fn(sb)
+ }
+ sb = &StructuredBindings{Bindings: bindings}
+ if r := fn(sb); r != nil {
+ return r
+ }
+ return sb
+}
+
+func (sb *StructuredBindings) GoString() string {
+ return sb.goString(0, "")
+}
+
+func (sb *StructuredBindings) goString(indent int, field string) string {
+ var strb strings.Builder
+ fmt.Fprintf(&strb, "%*s%sStructuredBinding:", indent, "", field)
+ for _, b := range sb.Bindings {
+ strb.WriteByte('\n')
+ strb.WriteString(b.goString(indent+2, ""))
+ }
+ return strb.String()
+}
+
+// UnnamedType is an unnamed type, that just has an index.
+type UnnamedType struct {
+ Num int
+}
+
+func (ut *UnnamedType) print(ps *printState) {
+ if ps.llvmStyle {
+ if ut.Num == 0 {
+ ps.writeString("'unnamed'")
+ } else {
+ ps.writeString(fmt.Sprintf("'unnamed%d'", ut.Num-1))
+ }
+ } else {
+ ps.writeString(fmt.Sprintf("{unnamed type#%d}", ut.Num+1))
+ }
+}
+
+func (ut *UnnamedType) Traverse(fn func(AST) bool) {
+ fn(ut)
+}
+
+func (ut *UnnamedType) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(ut) {
+ return nil
+ }
+ return fn(ut)
+}
+
+func (ut *UnnamedType) GoString() string {
+ return ut.goString(0, "")
+}
+
+func (ut *UnnamedType) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sUnnamedType: Num: %d", indent, "", field, ut.Num)
+}
+
+// Clone is a clone of a function, with a distinguishing suffix.
+type Clone struct {
+ Base AST
+ Suffix string
+}
+
+func (c *Clone) print(ps *printState) {
+ ps.print(c.Base)
+ if ps.llvmStyle {
+ ps.writeString(" (")
+ ps.writeString(c.Suffix)
+ ps.writeByte(')')
+ } else {
+ ps.writeString(fmt.Sprintf(" [clone %s]", c.Suffix))
+ }
+}
+
+func (c *Clone) Traverse(fn func(AST) bool) {
+ if fn(c) {
+ c.Base.Traverse(fn)
+ }
+}
+
+func (c *Clone) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(c) {
+ return nil
+ }
+ base := c.Base.Copy(fn, skip)
+ if base == nil {
+ return fn(c)
+ }
+ c = &Clone{Base: base, Suffix: c.Suffix}
+ if r := fn(c); r != nil {
+ return r
+ }
+ return c
+}
+
+func (c *Clone) GoString() string {
+ return c.goString(0, "")
+}
+
+func (c *Clone) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sClone: Suffix: %s\n%s", indent, "", field,
+ c.Suffix, c.Base.goString(indent+2, "Base: "))
+}
+
+// Special is a special symbol, printed as a prefix plus another
+// value.
+type Special struct {
+ Prefix string
+ Val AST
+}
+
+func (s *Special) print(ps *printState) {
+ prefix := s.Prefix
+ if ps.llvmStyle {
+ switch prefix {
+ case "TLS wrapper function for ":
+ prefix = "thread-local wrapper routine for "
+ case "TLS init function for ":
+ prefix = "thread-local initialization routine for "
+ }
+ }
+ ps.writeString(prefix)
+ ps.print(s.Val)
+}
+
+func (s *Special) Traverse(fn func(AST) bool) {
+ if fn(s) {
+ s.Val.Traverse(fn)
+ }
+}
+
+func (s *Special) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(s) {
+ return nil
+ }
+ val := s.Val.Copy(fn, skip)
+ if val == nil {
+ return fn(s)
+ }
+ s = &Special{Prefix: s.Prefix, Val: val}
+ if r := fn(s); r != nil {
+ return r
+ }
+ return s
+}
+
+func (s *Special) GoString() string {
+ return s.goString(0, "")
+}
+
+func (s *Special) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sSpecial: Prefix: %s\n%s", indent, "", field,
+ s.Prefix, s.Val.goString(indent+2, "Val: "))
+}
+
+// Special2 is like special, but uses two values.
+type Special2 struct {
+ Prefix string
+ Val1 AST
+ Middle string
+ Val2 AST
+}
+
+func (s *Special2) print(ps *printState) {
+ ps.writeString(s.Prefix)
+ ps.print(s.Val1)
+ ps.writeString(s.Middle)
+ ps.print(s.Val2)
+}
+
+func (s *Special2) Traverse(fn func(AST) bool) {
+ if fn(s) {
+ s.Val1.Traverse(fn)
+ s.Val2.Traverse(fn)
+ }
+}
+
+func (s *Special2) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(s) {
+ return nil
+ }
+ val1 := s.Val1.Copy(fn, skip)
+ val2 := s.Val2.Copy(fn, skip)
+ if val1 == nil && val2 == nil {
+ return fn(s)
+ }
+ if val1 == nil {
+ val1 = s.Val1
+ }
+ if val2 == nil {
+ val2 = s.Val2
+ }
+ s = &Special2{Prefix: s.Prefix, Val1: val1, Middle: s.Middle, Val2: val2}
+ if r := fn(s); r != nil {
+ return r
+ }
+ return s
+}
+
+func (s *Special2) GoString() string {
+ return s.goString(0, "")
+}
+
+func (s *Special2) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sSpecial2: Prefix: %s\n%s\n%*sMiddle: %s\n%s", indent, "", field,
+ s.Prefix, s.Val1.goString(indent+2, "Val1: "),
+ indent+2, "", s.Middle, s.Val2.goString(indent+2, "Val2: "))
+}
+
+// EnableIf is used by clang for an enable_if attribute.
+type EnableIf struct {
+ Type AST
+ Args []AST
+}
+
+func (ei *EnableIf) print(ps *printState) {
+ ps.print(ei.Type)
+ ps.writeString(" [enable_if:")
+ first := true
+ for _, a := range ei.Args {
+ if !first {
+ ps.writeString(", ")
+ }
+ ps.print(a)
+ first = false
+ }
+ ps.writeString("]")
+}
+
+func (ei *EnableIf) Traverse(fn func(AST) bool) {
+ if fn(ei) {
+ ei.Type.Traverse(fn)
+ for _, a := range ei.Args {
+ a.Traverse(fn)
+ }
+ }
+}
+
+func (ei *EnableIf) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(ei) {
+ return nil
+ }
+ typ := ei.Type.Copy(fn, skip)
+ argsChanged := false
+ args := make([]AST, len(ei.Args))
+ for i, a := range ei.Args {
+ ac := a.Copy(fn, skip)
+ if ac == nil {
+ args[i] = a
+ } else {
+ args[i] = ac
+ argsChanged = true
+ }
+ }
+ if typ == nil && !argsChanged {
+ return fn(ei)
+ }
+ if typ == nil {
+ typ = ei.Type
+ }
+ ei = &EnableIf{Type: typ, Args: args}
+ if r := fn(ei); r != nil {
+ return r
+ }
+ return ei
+}
+
+func (ei *EnableIf) GoString() string {
+ return ei.goString(0, "")
+}
+
+func (ei *EnableIf) goString(indent int, field string) string {
+ var args string
+ if len(ei.Args) == 0 {
+ args = fmt.Sprintf("%*sArgs: nil", indent+2, "")
+ } else {
+ args = fmt.Sprintf("%*sArgs:", indent+2, "")
+ for i, a := range ei.Args {
+ args += "\n"
+ args += a.goString(indent+4, fmt.Sprintf("%d: ", i))
+ }
+ }
+ return fmt.Sprintf("%*s%sEnableIf:\n%s\n%s", indent, "", field,
+ ei.Type.goString(indent+2, "Type: "), args)
+}
+
// printInner prints all pending inner types, draining ps.inner.
// If prefixOnly is true, entries that must not print yet (currently
// *MethodWithQualifiers; see printOneInner) are instead collected and
// returned so the caller can print them later.
func (ps *printState) printInner(prefixOnly bool) []AST {
	var save []AST
	var psave *[]AST
	if prefixOnly {
		// A non-nil psave tells printOneInner to set entries aside.
		psave = &save
	}
	for len(ps.inner) > 0 {
		ps.printOneInner(psave)
	}
	return save
}
+
+// innerPrinter is an interface for types that can print themselves as
+// inner types.
+type innerPrinter interface {
+ printInner(*printState)
+}
+
// printOneInner prints the most recently pushed inner type, popping
// it from ps.inner. If save is not nil, *MethodWithQualifiers entries
// are not printed; they are appended to *save for the caller to
// handle later. Panics if ps.inner is empty.
func (ps *printState) printOneInner(save *[]AST) {
	if len(ps.inner) == 0 {
		panic("printOneInner called with no inner types")
	}
	// Pop the most recently pushed inner type.
	ln := len(ps.inner)
	a := ps.inner[ln-1]
	ps.inner = ps.inner[:ln-1]

	if save != nil {
		if _, ok := a.(*MethodWithQualifiers); ok {
			// Defer method qualifiers to the caller.
			*save = append(*save, a)
			return
		}
	}

	// Use the type's specialized inner printing when it has one.
	if ip, ok := a.(innerPrinter); ok {
		ip.printInner(ps)
	} else {
		ps.print(a)
	}
}
+
// isEmpty returns whether printing a will not print anything.
func (ps *printState) isEmpty(a AST) bool {
	switch a := a.(type) {
	case *ArgumentPack:
		// An argument pack is empty if all of its arguments are.
		for _, a := range a.Args {
			if !ps.isEmpty(a) {
				return false
			}
		}
		return true
	case *ExprList:
		// An expression list prints nothing but its elements.
		return len(a.Exprs) == 0
	case *PackExpansion:
		// A pack expansion with a known pack prints only its base.
		return a.Pack != nil && ps.isEmpty(a.Base)
	default:
		// Conservatively assume any other node prints something.
		return false
	}
}
diff --git a/src/cmd/vendor/github.com/ianlancetaylor/demangle/demangle.go b/src/cmd/vendor/github.com/ianlancetaylor/demangle/demangle.go
new file mode 100644
index 0000000..14e77a6
--- /dev/null
+++ b/src/cmd/vendor/github.com/ianlancetaylor/demangle/demangle.go
@@ -0,0 +1,3362 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package demangle defines functions that demangle GCC/LLVM
+// C++ and Rust symbol names.
+// This package recognizes names that were mangled according to the C++ ABI
+// defined at http://codesourcery.com/cxx-abi/ and the Rust ABI
+// defined at
+// https://rust-lang.github.io/rfcs/2603-rust-symbol-name-mangling-v0.html
+//
+// Most programs will want to call Filter or ToString.
+package demangle
+
+import (
+ "errors"
+ "fmt"
+ "strings"
+)
+
+// ErrNotMangledName is returned by CheckedDemangle if the string does
+// not appear to be a C++ symbol name.
+var ErrNotMangledName = errors.New("not a C++ or Rust mangled name")
+
+// Option is the type of demangler options.
+type Option int
+
+const (
+ // The NoParams option disables demangling of function parameters.
+ // It only omits the parameters of the function name being demangled,
+ // not the parameter types of other functions that may be mentioned.
+ // Using the option will speed up the demangler and cause it to
+ // use less memory.
+ NoParams Option = iota
+
+ // The NoTemplateParams option disables demangling of template parameters.
+ // This applies to both C++ and Rust.
+ NoTemplateParams
+
+ // The NoEnclosingParams option disables demangling of the function
+ // parameter types of the enclosing function when demangling a
+ // local name defined within a function.
+ NoEnclosingParams
+
+ // The NoClones option disables inclusion of clone suffixes.
+ // NoParams implies NoClones.
+ NoClones
+
+ // The NoRust option disables demangling of old-style Rust
+ // mangled names, which can be confused with C++ style mangled
+ // names. New style Rust mangled names are still recognized.
+ NoRust
+
+ // The Verbose option turns on more verbose demangling.
+ Verbose
+
+ // LLVMStyle tries to translate an AST to a string in the
+ // style of the LLVM demangler. This does not affect
+ // the parsing of the AST, only the conversion of the AST
+ // to a string.
+ LLVMStyle
+)
+
+// maxLengthShift is how we shift the MaxLength value.
+const maxLengthShift = 16
+
+// maxLengthMask is a mask for the maxLength value.
+const maxLengthMask = 0x1f << maxLengthShift
+
+// MaxLength returns an Option that limits the maximum length of a
+// demangled string. The maximum length is expressed as a power of 2,
+// so a value of 1 limits the returned string to 2 characters, and
+// a value of 16 limits the returned string to 65,536 characters.
+// The value must be between 1 and 30.
+func MaxLength(pow int) Option {
+ if pow <= 0 || pow > 30 {
+ panic("demangle: invalid MaxLength value")
+ }
+ return Option(pow << maxLengthShift)
+}
+
+// isMaxLength reports whether an Option holds a maximum length.
+func isMaxLength(opt Option) bool {
+ return opt&maxLengthMask != 0
+}
+
+// maxLength returns the maximum length stored in an Option.
+func maxLength(opt Option) int {
+ return 1 << ((opt & maxLengthMask) >> maxLengthShift)
+}
+
+// Filter demangles a C++ or Rust symbol name,
+// returning the human-readable C++ or Rust name.
+// If any error occurs during demangling, the input string is returned.
+func Filter(name string, options ...Option) string {
+ ret, err := ToString(name, options...)
+ if err != nil {
+ return name
+ }
+ return ret
+}
+
// ToString demangles a C++ or Rust symbol name,
// returning a human-readable C++ or Rust name or an error.
// If the name does not appear to be a C++ or Rust symbol name at all,
// the error will be ErrNotMangledName.
func ToString(name string, options ...Option) (string, error) {
	// New-style Rust mangled names start with "_R".
	if strings.HasPrefix(name, "_R") {
		return rustToString(name, options)
	}

	// Check for an old-style Rust mangled name.
	// It starts with _ZN and ends with "17h" followed by 16 hex digits
	// followed by "E" followed by an optional suffix starting with "."
	// (which we ignore).
	if strings.HasPrefix(name, "_ZN") {
		rname := name
		// Strip the optional "."-suffix after the trailing "E".
		if pos := strings.LastIndex(rname, "E."); pos > 0 {
			rname = rname[:pos+1]
		}
		if strings.HasSuffix(rname, "E") && len(rname) > 23 && rname[len(rname)-20:len(rname)-17] == "17h" {
			// Old-style Rust names can be confused with C++ names
			// (see the NoRust option), so skip the Rust
			// interpretation when NoRust was requested.
			noRust := false
			for _, o := range options {
				if o == NoRust {
					noRust = true
					break
				}
			}
			if !noRust {
				s, ok := oldRustToString(rname, options)
				if ok {
					return s, nil
				}
				// Not a valid old-style Rust name after all;
				// fall through to C++ demangling.
			}
		}
	}

	a, err := ToAST(name, options...)
	if err != nil {
		return "", err
	}
	return ASTToString(a, options...), nil
}
+
// ToAST demangles a C++ symbol name into an abstract syntax tree
// representing the symbol.
// If the NoParams option is passed, and the name has a function type,
// the parameter types are not demangled.
// If the name does not appear to be a C++ symbol name at all, the
// error will be ErrNotMangledName.
// This function does not currently support Rust symbol names.
func ToAST(name string, options ...Option) (AST, error) {
	if strings.HasPrefix(name, "_Z") {
		a, err := doDemangle(name[2:], options...)
		// Error positions are relative to the trimmed string; shift
		// them past the "_Z" prefix.
		return a, adjustErr(err, 2)
	}

	if strings.HasPrefix(name, "___Z") {
		// clang extensions
		block := strings.LastIndex(name, "_block_invoke")
		if block == -1 {
			return nil, ErrNotMangledName
		}
		// Demangle the part between "___Z" and "_block_invoke".
		a, err := doDemangle(name[4:block], options...)
		if err != nil {
			return a, adjustErr(err, 4)
		}
		// Skip an optional "_" separator and invocation number
		// following "_block_invoke".
		name = strings.TrimPrefix(name[block:], "_block_invoke")
		if len(name) > 0 && name[0] == '_' {
			name = name[1:]
		}
		for len(name) > 0 && isDigit(name[0]) {
			name = name[1:]
		}
		// Only a "."-suffix may remain.
		if len(name) > 0 && name[0] != '.' {
			return nil, errors.New("unparsed characters at end of mangled name")
		}
		a = &Special{Prefix: "invocation function for block in ", Val: a}
		return a, nil
	}

	const prefix = "_GLOBAL_"
	if strings.HasPrefix(name, prefix) {
		// The standard demangler ignores NoParams for global
		// constructors. We are compatible.
		i := 0
		for i < len(options) {
			if options[i] == NoParams {
				options = append(options[:i], options[i+1:]...)
			} else {
				i++
			}
		}
		a, err := globalCDtorName(name[len(prefix):], options...)
		return a, adjustErr(err, len(prefix))
	}

	return nil, ErrNotMangledName
}
+
+// globalCDtorName demangles a global constructor/destructor symbol name.
+// The parameter is the string following the "_GLOBAL_" prefix.
+func globalCDtorName(name string, options ...Option) (AST, error) {
+ if len(name) < 4 {
+ return nil, ErrNotMangledName
+ }
+ switch name[0] {
+ case '.', '_', '$':
+ default:
+ return nil, ErrNotMangledName
+ }
+
+ var ctor bool
+ switch name[1] {
+ case 'I':
+ ctor = true
+ case 'D':
+ ctor = false
+ default:
+ return nil, ErrNotMangledName
+ }
+
+ if name[2] != '_' {
+ return nil, ErrNotMangledName
+ }
+
+ if !strings.HasPrefix(name[3:], "_Z") {
+ return &GlobalCDtor{Ctor: ctor, Key: &Name{Name: name}}, nil
+ } else {
+ a, err := doDemangle(name[5:], options...)
+ if err != nil {
+ return nil, adjustErr(err, 5)
+ }
+ return &GlobalCDtor{Ctor: ctor, Key: a}, nil
+ }
+}
+
// The doDemangle function is the entry point into the demangler proper.
// name is the mangled string with any "_Z" prefix already removed.
// Errors are reported either via the returned error or, internally,
// by panicking with a demangleErr that is recovered here.
func doDemangle(name string, options ...Option) (ret AST, err error) {
	// When the demangling routines encounter an error, they panic
	// with a value of type demangleErr.
	defer func() {
		if r := recover(); r != nil {
			if de, ok := r.(demangleErr); ok {
				ret = nil
				err = de
				return
			}
			// Not a demangleErr: re-panic.
			panic(r)
		}
	}()

	params := true
	clones := true
	verbose := false
	for _, o := range options {
		switch {
		case o == NoParams:
			// NoParams also suppresses clone-suffix parsing.
			params = false
			clones = false
		case o == NoClones:
			clones = false
		case o == Verbose:
			verbose = true
		case o == NoTemplateParams || o == NoEnclosingParams || o == LLVMStyle || isMaxLength(o):
			// These are valid options but only affect
			// printing of the AST.
		case o == NoRust:
			// Unimportant here.
		default:
			return nil, fmt.Errorf("unrecognized demangler option %v", o)
		}
	}

	st := &state{str: name, verbose: verbose}
	a := st.encoding(params, notForLocalName)

	// Accept a clone suffix.
	if clones {
		for len(st.str) > 1 && st.str[0] == '.' && (isLower(st.str[1]) || st.str[1] == '_' || isDigit(st.str[1])) {
			a = st.cloneSuffix(a)
		}
	}

	if clones && len(st.str) > 0 {
		st.fail("unparsed characters at end of mangled name")
	}

	return a, nil
}
+
// A state holds the current state of demangling a string.
// A state is advanced destructively as productions are consumed;
// use copy to snapshot it before a speculative parse.
type state struct {
	str       string        // remainder of string to demangle
	verbose   bool          // whether to use verbose demangling
	off       int           // offset of str within original string
	subs      substitutions // substitutions
	templates []*Template   // templates being processed

	// The number of entries in templates when we started parsing
	// a lambda, plus 1 so that 0 means not parsing a lambda.
	lambdaTemplateLevel int

	// Counts of template parameters without template arguments,
	// for lambdas.
	typeTemplateParamCount     int
	nonTypeTemplateParamCount  int
	templateTemplateParamCount int
}
+
+// copy returns a copy of the current state.
+func (st *state) copy() *state {
+ n := new(state)
+ *n = *st
+ return n
+}
+
+// fail panics with demangleErr, to be caught in doDemangle.
+func (st *state) fail(err string) {
+ panic(demangleErr{err: err, off: st.off})
+}
+
+// failEarlier is like fail, but decrements the offset to indicate
+// that the point of failure occurred earlier in the string.
+func (st *state) failEarlier(err string, dec int) {
+ if st.off < dec {
+ panic("internal error")
+ }
+ panic(demangleErr{err: err, off: st.off - dec})
+}
+
+// advance advances the current string offset.
+func (st *state) advance(add int) {
+ if len(st.str) < add {
+ panic("internal error")
+ }
+ st.str = st.str[add:]
+ st.off += add
+}
+
+// checkChar requires that the next character in the string be c, and
+// advances past it.
+func (st *state) checkChar(c byte) {
+ if len(st.str) == 0 || st.str[0] != c {
+ panic("internal error")
+ }
+ st.advance(1)
+}
+
// A demangleErr is an error at a specific offset in the mangled
// string.
type demangleErr struct {
	err string // description of the failure
	off int    // byte offset within the original mangled string
}

// Error implements the builtin error interface for demangleErr.
func (de demangleErr) Error() string {
	loc := fmt.Sprintf(" at %d", de.off)
	return de.err + loc
}

// adjustErr shifts the recorded offset of err by adj when err is a
// demangleErr; any other error (including nil) is returned unchanged.
func adjustErr(err error, adj int) error {
	switch e := err.(type) {
	case nil:
		return nil
	case demangleErr:
		e.off += adj
		return e
	default:
		return err
	}
}
+
// forLocalNameType distinguishes whether encoding is parsing the
// function part of a <local-name> (forLocalName) or a top-level
// symbol (notForLocalName).
type forLocalNameType int

const (
	forLocalName forLocalNameType = iota
	notForLocalName
)
+
// encoding parses:
//
//	encoding ::= <(function) name> <bare-function-type>
//	             <(data) name>
//	             <special-name>
//
// params controls whether function parameters are demangled; local
// says whether this encoding is the function part of a <local-name>.
func (st *state) encoding(params bool, local forLocalNameType) AST {
	if len(st.str) < 1 {
		st.fail("expected encoding")
	}

	if st.str[0] == 'G' || st.str[0] == 'T' {
		return st.specialName()
	}

	a := st.name()
	a = simplify(a)

	if !params {
		// Don't demangle the parameters.

		// Strip CV-qualifiers, as they apply to the 'this'
		// parameter, and are not output by the standard
		// demangler without parameters.
		if mwq, ok := a.(*MethodWithQualifiers); ok {
			a = mwq.Method
		}

		// If this is a local name, there may be CV-qualifiers
		// on the name that really apply to the top level, and
		// therefore must be discarded when discarding
		// parameters. This can happen when parsing a class
		// that is local to a function.
		if q, ok := a.(*Qualified); ok && q.LocalName {
			p := &q.Name
			if da, ok := (*p).(*DefaultArg); ok {
				p = &da.Arg
			}
			if mwq, ok := (*p).(*MethodWithQualifiers); ok {
				*p = mwq.Method
			}
		}

		return a
	}

	if len(st.str) == 0 || st.str[0] == 'E' {
		// There are no parameters--this is a data symbol, not
		// a function symbol.
		return a
	}

	mwq, _ := a.(*MethodWithQualifiers)

	// findTemplate digs through local names, method qualifiers, and
	// constructors to find the Template whose parameters are in
	// scope while parsing the function type; nil if there is none.
	var findTemplate func(AST) *Template
	findTemplate = func(check AST) *Template {
		switch check := check.(type) {
		case *Template:
			return check
		case *Qualified:
			if check.LocalName {
				return findTemplate(check.Name)
			} else if _, ok := check.Name.(*Constructor); ok {
				return findTemplate(check.Name)
			}
		case *MethodWithQualifiers:
			return findTemplate(check.Method)
		case *Constructor:
			if check.Base != nil {
				return findTemplate(check.Base)
			}
		}
		return nil
	}

	template := findTemplate(a)
	var oldLambdaTemplateLevel int
	if template != nil {
		st.templates = append(st.templates, template)
		oldLambdaTemplateLevel = st.lambdaTemplateLevel
		st.lambdaTemplateLevel = 0
	}

	// Checking for the enable_if attribute here is what the LLVM
	// demangler does. This is not very general but perhaps it is
	// sufficient.
	const enableIfPrefix = "Ua9enable_ifI"
	var enableIfArgs []AST
	if strings.HasPrefix(st.str, enableIfPrefix) {
		// Advance to but not past the trailing 'I', which
		// templateArgs expects to consume itself.
		st.advance(len(enableIfPrefix) - 1)
		enableIfArgs = st.templateArgs()
	}

	ft := st.bareFunctionType(hasReturnType(a))

	if template != nil {
		st.templates = st.templates[:len(st.templates)-1]
		st.lambdaTemplateLevel = oldLambdaTemplateLevel
	}

	ft = simplify(ft)

	// For a local name, discard the return type, so that it
	// doesn't get confused with the top level return type.
	if local == forLocalName {
		if functype, ok := ft.(*FunctionType); ok {
			functype.ForLocalName = true
		}
	}

	// Any top-level qualifiers belong to the function type.
	if mwq != nil {
		a = mwq.Method
		mwq.Method = ft
		ft = mwq
	}
	if q, ok := a.(*Qualified); ok && q.LocalName {
		p := &q.Name
		if da, ok := (*p).(*DefaultArg); ok {
			p = &da.Arg
		}
		if mwq, ok := (*p).(*MethodWithQualifiers); ok {
			*p = mwq.Method
			mwq.Method = ft
			ft = mwq
		}
	}

	r := AST(&Typed{Name: a, Type: ft})

	if len(enableIfArgs) > 0 {
		r = &EnableIf{Type: r, Args: enableIfArgs}
	}

	return r
}
+
// hasReturnType returns whether the mangled form of a will have a
// return type.
func hasReturnType(a AST) bool {
	switch a := a.(type) {
	case *Qualified:
		if a.LocalName {
			return hasReturnType(a.Name)
		}
		return false
	case *Template:
		// Template functions carry a return type unless they
		// are constructors, destructors, or conversions.
		return !isCDtorConversion(a.Name)
	case *TypeWithQualifiers:
		return hasReturnType(a.Base)
	case *MethodWithQualifiers:
		return hasReturnType(a.Method)
	default:
		return false
	}
}
+
+// isCDtorConversion returns when an AST is a constructor, a
+// destructor, or a conversion operator.
+func isCDtorConversion(a AST) bool {
+ switch a := a.(type) {
+ case *Qualified:
+ return isCDtorConversion(a.Name)
+ case *Constructor, *Destructor, *Cast:
+ return true
+ default:
+ return false
+ }
+}
+
+// taggedName parses:
+//
+// <tagged-name> ::= <name> B <source-name>
+func (st *state) taggedName(a AST) AST {
+ for len(st.str) > 0 && st.str[0] == 'B' {
+ st.advance(1)
+ tag := st.sourceName()
+ a = &TaggedName{Name: a, Tag: tag}
+ }
+ return a
+}
+
// name parses:
//
//	<name> ::= <nested-name>
//	       ::= <unscoped-name>
//	       ::= <unscoped-template-name> <template-args>
//	       ::= <local-name>
//
//	<unscoped-name> ::= <unqualified-name>
//	                ::= St <unqualified-name>
//
//	<unscoped-template-name> ::= <unscoped-name>
//	                         ::= <substitution>
func (st *state) name() AST {
	if len(st.str) < 1 {
		st.fail("expected name")
	}
	switch st.str[0] {
	case 'N':
		return st.nestedName()
	case 'Z':
		return st.localName()
	case 'U':
		a, isCast := st.unqualifiedName()
		if isCast {
			st.setTemplate(a, nil)
		}
		return a
	case 'S':
		if len(st.str) < 2 {
			st.advance(1)
			st.fail("expected substitution index")
		}
		var a AST
		isCast := false
		subst := false
		if st.str[1] == 't' {
			// "St" is the std namespace.
			st.advance(2)
			a, isCast = st.unqualifiedName()
			a = &Qualified{Scope: &Name{Name: "std"}, Name: a, LocalName: false}
		} else {
			a = st.substitution(false)
			subst = true
		}
		if len(st.str) > 0 && st.str[0] == 'I' {
			// This can only happen if we saw
			// <unscoped-template-name> and are about to see
			// <template-args>. <unscoped-template-name> is a
			// substitution candidate if it did not come from a
			// substitution.
			if !subst {
				st.subs.add(a)
			}
			args := st.templateArgs()
			tmpl := &Template{Name: a, Args: args}
			if isCast {
				st.setTemplate(a, tmpl)
				st.clearTemplateArgs(args)
				isCast = false
			}
			a = tmpl
		}
		if isCast {
			st.setTemplate(a, nil)
		}
		return a

	default:
		a, isCast := st.unqualifiedName()
		if len(st.str) > 0 && st.str[0] == 'I' {
			// <unscoped-template-name> <template-args>.
			st.subs.add(a)
			args := st.templateArgs()
			tmpl := &Template{Name: a, Args: args}
			if isCast {
				st.setTemplate(a, tmpl)
				st.clearTemplateArgs(args)
				isCast = false
			}
			a = tmpl
		}
		if isCast {
			st.setTemplate(a, nil)
		}
		return a
	}
}
+
// nestedName parses:
//
//	<nested-name> ::= N [<CV-qualifiers>] [<ref-qualifier>] <prefix> <unqualified-name> E
//	              ::= N [<CV-qualifiers>] [<ref-qualifier>] <template-prefix> <template-args> E
func (st *state) nestedName() AST {
	st.checkChar('N')
	q := st.cvQualifiers()
	r := st.refQualifier()
	a := st.prefix()
	// CV- and ref-qualifiers here apply to the method as a whole.
	if q != nil || r != "" {
		a = &MethodWithQualifiers{Method: a, Qualifiers: q, RefQualifier: r}
	}
	if len(st.str) == 0 || st.str[0] != 'E' {
		st.fail("expected E after nested name")
	}
	st.advance(1)
	return a
}
+
// prefix parses:
//
//	<prefix> ::= <prefix> <unqualified-name>
//	         ::= <template-prefix> <template-args>
//	         ::= <template-param>
//	         ::= <decltype>
//	         ::=
//	         ::= <substitution>
//
//	<template-prefix> ::= <prefix> <(template) unqualified-name>
//	                  ::= <template-param>
//	                  ::= <substitution>
//
//	<decltype> ::= Dt <expression> E
//	           ::= DT <expression> E
//
// This loops, accumulating name components into a Qualified chain in
// a, until the closing 'E' of the enclosing <nested-name> is reached.
func (st *state) prefix() AST {
	var a AST

	// The last name seen, for a constructor/destructor.
	var last AST

	// getLast strips Template/Qualified/TaggedName wrappers to find
	// the underlying name a ctor/dtor refers back to.
	getLast := func(a AST) AST {
		for {
			if t, ok := a.(*Template); ok {
				a = t.Name
			} else if q, ok := a.(*Qualified); ok {
				a = q.Name
			} else if t, ok := a.(*TaggedName); ok {
				a = t.Name
			} else {
				return a
			}
		}
	}

	var cast *Cast
	for {
		if len(st.str) == 0 {
			st.fail("expected prefix")
		}
		var next AST

		c := st.str[0]
		if isDigit(c) || isLower(c) || c == 'U' || c == 'L' || (c == 'D' && len(st.str) > 1 && st.str[1] == 'C') {
			un, isUnCast := st.unqualifiedName()
			next = un
			if isUnCast {
				if tn, ok := un.(*TaggedName); ok {
					un = tn.Name
				}
				cast = un.(*Cast)
			}
		} else {
			switch st.str[0] {
			case 'C':
				// Constructor, possibly inheriting ("CI").
				inheriting := false
				st.advance(1)
				if len(st.str) > 0 && st.str[0] == 'I' {
					inheriting = true
					st.advance(1)
				}
				if len(st.str) < 1 {
					st.fail("expected constructor type")
				}
				if last == nil {
					st.fail("constructor before name is seen")
				}
				st.advance(1)
				var base AST
				if inheriting {
					base = st.demangleType(false)
				}
				next = &Constructor{
					Name: getLast(last),
					Base: base,
				}
				if len(st.str) > 0 && st.str[0] == 'B' {
					next = st.taggedName(next)
				}
			case 'D':
				// "DT"/"Dt" is decltype; otherwise a destructor.
				if len(st.str) > 1 && (st.str[1] == 'T' || st.str[1] == 't') {
					next = st.demangleType(false)
				} else {
					if len(st.str) < 2 {
						st.fail("expected destructor type")
					}
					if last == nil {
						st.fail("destructor before name is seen")
					}
					st.advance(2)
					next = &Destructor{Name: getLast(last)}
					if len(st.str) > 0 && st.str[0] == 'B' {
						next = st.taggedName(next)
					}
				}
			case 'S':
				next = st.substitution(true)
			case 'I':
				if a == nil {
					st.fail("unexpected template arguments")
				}
				var args []AST
				args = st.templateArgs()
				tmpl := &Template{Name: a, Args: args}
				if cast != nil {
					st.setTemplate(cast, tmpl)
					st.clearTemplateArgs(args)
					cast = nil
				}
				a = nil
				next = tmpl
			case 'T':
				next = st.templateParam()
			case 'E':
				// End of the nested name; caller consumes 'E'.
				if a == nil {
					st.fail("expected prefix")
				}
				if cast != nil {
					var toTmpl *Template
					if castTempl, ok := cast.To.(*Template); ok {
						toTmpl = castTempl
					}
					st.setTemplate(cast, toTmpl)
				}
				return a
			case 'M':
				if a == nil {
					st.fail("unexpected lambda initializer")
				}
				// This is the initializer scope for a
				// lambda. We don't need to record
				// it. The normal code will treat the
				// variable has a type scope, which
				// gives appropriate output.
				st.advance(1)
				continue
			case 'J':
				// It appears that in some cases clang
				// can emit a J for a template arg
				// without the expected I. I don't
				// know when this happens, but I've
				// seen it in some large C++ programs.
				if a == nil {
					st.fail("unexpected template arguments")
				}
				var args []AST
				for len(st.str) == 0 || st.str[0] != 'E' {
					arg := st.templateArg()
					args = append(args, arg)
				}
				st.advance(1)
				tmpl := &Template{Name: a, Args: args}
				if cast != nil {
					st.setTemplate(cast, tmpl)
					st.clearTemplateArgs(args)
					cast = nil
				}
				a = nil
				next = tmpl
			default:
				st.fail("unrecognized letter in prefix")
			}
		}
		last = next
		if a == nil {
			a = next
		} else {
			a = &Qualified{Scope: a, Name: next, LocalName: false}
		}

		// Every intermediate prefix (other than one produced by a
		// substitution, or the final one before 'E') is itself a
		// substitution candidate.
		if c != 'S' && (len(st.str) == 0 || st.str[0] != 'E') {
			st.subs.add(a)
		}
	}
}
+
// unqualifiedName parses:
//
//	<unqualified-name> ::= <operator-name>
//	                   ::= <ctor-dtor-name>
//	                   ::= <source-name>
//	                   ::= <local-source-name>
//
//	<local-source-name> ::= L <source-name> <discriminator>
//
// The second result reports whether the name is a conversion
// (cast) operator, which needs special template handling.
func (st *state) unqualifiedName() (r AST, isCast bool) {
	if len(st.str) < 1 {
		st.fail("expected unqualified name")
	}
	var a AST
	isCast = false
	c := st.str[0]
	if isDigit(c) {
		a = st.sourceName()
	} else if isLower(c) {
		a, _ = st.operatorName(false)
		if _, ok := a.(*Cast); ok {
			isCast = true
		}
		// A literal operator is followed by its suffix name.
		if op, ok := a.(*Operator); ok && op.Name == `operator"" ` {
			n := st.sourceName()
			a = &Unary{Op: op, Expr: n, Suffix: false, SizeofType: false}
		}
	} else if c == 'D' && len(st.str) > 1 && st.str[1] == 'C' {
		// "DC" introduces structured bindings, terminated by 'E'.
		var bindings []AST
		st.advance(2)
		for {
			binding := st.sourceName()
			bindings = append(bindings, binding)
			if len(st.str) > 0 && st.str[0] == 'E' {
				st.advance(1)
				break
			}
		}
		a = &StructuredBindings{Bindings: bindings}
	} else {
		switch c {
		case 'C', 'D':
			st.fail("constructor/destructor not in nested name")
		case 'L':
			st.advance(1)
			a = st.sourceName()
			a = st.discriminator(a)
		case 'U':
			if len(st.str) < 2 {
				st.advance(1)
				st.fail("expected closure or unnamed type")
			}
			c := st.str[1]
			switch c {
			case 'b':
				st.advance(2)
				st.compactNumber()
				a = &Name{Name: "'block-literal'"}
			case 'l':
				a = st.closureTypeName()
			case 't':
				a = st.unnamedTypeName()
			default:
				st.advance(1)
				st.fail("expected closure or unnamed type")
			}
		default:
			st.fail("expected unqualified name")
		}
	}

	if len(st.str) > 0 && st.str[0] == 'B' {
		a = st.taggedName(a)
	}

	return a, isCast
}
+
// sourceName parses:
//
//	<source-name> ::= <(positive length) number> <identifier>
//	identifier ::= <(unqualified source code identifier)>
func (st *state) sourceName() AST {
	val := st.number()
	if val <= 0 {
		st.fail("expected positive number")
	}
	if len(st.str) < val {
		st.fail("not enough characters for identifier")
	}
	id := st.str[:val]
	st.advance(val)

	// Look for GCC encoding of anonymous namespace, and make it
	// more friendly.
	const anonPrefix = "_GLOBAL_"
	if strings.HasPrefix(id, anonPrefix) && len(id) > len(anonPrefix)+2 {
		c1 := id[len(anonPrefix)]
		c2 := id[len(anonPrefix)+1]
		if (c1 == '.' || c1 == '_' || c1 == '$') && c2 == 'N' {
			id = "(anonymous namespace)"
		}
	}

	n := &Name{Name: id}
	return n
}
+
+// number parses:
+//
+// number ::= [n] <(non-negative decimal integer)>
+func (st *state) number() int {
+ neg := false
+ if len(st.str) > 0 && st.str[0] == 'n' {
+ neg = true
+ st.advance(1)
+ }
+ if len(st.str) == 0 || !isDigit(st.str[0]) {
+ st.fail("missing number")
+ }
+ val := 0
+ for len(st.str) > 0 && isDigit(st.str[0]) {
+ // Number picked to ensure we can't overflow with 32-bit int.
+ // Any very large number here is bogus.
+ if val >= 0x80000000/10-10 {
+ st.fail("numeric overflow")
+ }
+ val = val*10 + int(st.str[0]-'0')
+ st.advance(1)
+ }
+ if neg {
+ val = -val
+ }
+ return val
+}
+
// seqID parses:
//
//	<seq-id> ::= <0-9A-Z>+
//
// We expect this to be followed by an underscore. The result is the
// base-36 value plus one; a bare '_' yields 0. If eofOK is true,
// running out of input counts as terminating the sequence.
func (st *state) seqID(eofOK bool) int {
	if len(st.str) > 0 && st.str[0] == '_' {
		st.advance(1)
		return 0
	}
	id := 0
	for {
		if len(st.str) == 0 {
			if eofOK {
				return id + 1
			}
			st.fail("missing end to sequence ID")
		}
		// Don't overflow a 32-bit int.
		if id >= 0x80000000/36-36 {
			st.fail("sequence ID overflow")
		}
		c := st.str[0]
		if c == '_' {
			st.advance(1)
			return id + 1
		}
		if isDigit(c) {
			id = id*36 + int(c-'0')
		} else if isUpper(c) {
			id = id*36 + int(c-'A') + 10
		} else {
			st.fail("invalid character in sequence ID")
		}
		st.advance(1)
	}
}
+
// An operator is the demangled name, and the number of arguments it
// takes in an expression.
type operator struct {
	name string // demangled operator text, e.g. "+=" or "sizeof "
	args int    // operand count when the operator appears in an expression
}
+
// The operators map maps the mangled operator names to information
// about them. Keys are the two-character operator codes from the
// mangling grammar.
var operators = map[string]operator{
	"aN": {"&=", 2},
	"aS": {"=", 2},
	"aa": {"&&", 2},
	"ad": {"&", 1},
	"an": {"&", 2},
	"at": {"alignof ", 1},
	"aw": {"co_await ", 1},
	"az": {"alignof ", 1},
	"cc": {"const_cast", 2},
	"cl": {"()", 2},
	// cp is not in the ABI but is used by clang "when the call
	// would use ADL except for being parenthesized."
	"cp": {"()", 2},
	"cm": {",", 2},
	"co": {"~", 1},
	"dV": {"/=", 2},
	"dX": {"[...]=", 3},
	"da": {"delete[] ", 1},
	"dc": {"dynamic_cast", 2},
	"de": {"*", 1},
	"di": {"=", 2},
	"dl": {"delete ", 1},
	"ds": {".*", 2},
	"dt": {".", 2},
	"dv": {"/", 2},
	"dx": {"]=", 2},
	"eO": {"^=", 2},
	"eo": {"^", 2},
	"eq": {"==", 2},
	"fl": {"...", 2},
	"fr": {"...", 2},
	"fL": {"...", 3},
	"fR": {"...", 3},
	"ge": {">=", 2},
	"gs": {"::", 1},
	"gt": {">", 2},
	"ix": {"[]", 2},
	"lS": {"<<=", 2},
	"le": {"<=", 2},
	"li": {`operator"" `, 1},
	"ls": {"<<", 2},
	"lt": {"<", 2},
	"mI": {"-=", 2},
	"mL": {"*=", 2},
	"mi": {"-", 2},
	"ml": {"*", 2},
	"mm": {"--", 1},
	"na": {"new[]", 3},
	"ne": {"!=", 2},
	"ng": {"-", 1},
	"nt": {"!", 1},
	"nw": {"new", 3},
	"nx": {"noexcept", 1},
	"oR": {"|=", 2},
	"oo": {"||", 2},
	"or": {"|", 2},
	"pL": {"+=", 2},
	"pl": {"+", 2},
	"pm": {"->*", 2},
	"pp": {"++", 1},
	"ps": {"+", 1},
	"pt": {"->", 2},
	"qu": {"?", 3},
	"rM": {"%=", 2},
	"rS": {">>=", 2},
	"rc": {"reinterpret_cast", 2},
	"rm": {"%", 2},
	"rs": {">>", 2},
	"sP": {"sizeof...", 1},
	"sZ": {"sizeof...", 1},
	"sc": {"static_cast", 2},
	"ss": {"<=>", 2},
	"st": {"sizeof ", 1},
	"sz": {"sizeof ", 1},
	"tr": {"throw", 0},
	"tw": {"throw ", 1},
}
+
// operatorName parses:
//
//	operator_name ::= many different two character encodings.
//	              ::= cv <type>
//	              ::= v <digit> <source-name>
//
// We need to know whether we are in an expression because it affects
// how we handle template parameters in the type of a cast operator.
// The int result is the operator's argument count.
func (st *state) operatorName(inExpression bool) (AST, int) {
	if len(st.str) < 2 {
		st.fail("missing operator code")
	}
	code := st.str[:2]
	st.advance(2)
	if code[0] == 'v' && isDigit(code[1]) {
		// Vendor extended operator; the digit is the arity.
		name := st.sourceName()
		return &Operator{Name: name.(*Name).Name}, int(code[1] - '0')
	} else if code == "cv" {
		// Push a nil on templates to indicate that template
		// parameters will have their template filled in
		// later.
		if !inExpression {
			st.templates = append(st.templates, nil)
		}

		t := st.demangleType(!inExpression)

		if !inExpression {
			st.templates = st.templates[:len(st.templates)-1]
		}

		return &Cast{To: t}, 1
	} else if op, ok := operators[code]; ok {
		return &Operator{Name: op.name}, op.args
	} else {
		st.failEarlier("unrecognized operator code", 2)
		panic("not reached")
	}
}
+
+// localName parses:
+//
+// <local-name> ::= Z <(function) encoding> E <(entity) name> [<discriminator>]
+// ::= Z <(function) encoding> E s [<discriminator>]
+// ::= Z <(function) encoding> E d [<parameter> number>] _ <entity name>
+func (st *state) localName() AST {
+ st.checkChar('Z')
+ fn := st.encoding(true, forLocalName)
+ if len(st.str) == 0 || st.str[0] != 'E' {
+ st.fail("expected E after local name")
+ }
+ st.advance(1)
+ if len(st.str) > 0 && st.str[0] == 's' {
+ st.advance(1)
+ var n AST = &Name{Name: "string literal"}
+ n = st.discriminator(n)
+ return &Qualified{Scope: fn, Name: n, LocalName: true}
+ } else {
+ num := -1
+ if len(st.str) > 0 && st.str[0] == 'd' {
+ // Default argument scope.
+ st.advance(1)
+ num = st.compactNumber()
+ }
+ n := st.name()
+ n = st.discriminator(n)
+ if num >= 0 {
+ n = &DefaultArg{Num: num, Arg: n}
+ }
+ return &Qualified{Scope: fn, Name: n, LocalName: true}
+ }
+}
+
+// Parse a Java resource special-name.
+func (st *state) javaResource() AST {
+ off := st.off
+ ln := st.number()
+ if ln <= 1 {
+ st.failEarlier("java resource length less than 1", st.off-off)
+ }
+ if len(st.str) == 0 || st.str[0] != '_' {
+ st.fail("expected _ after number")
+ }
+ st.advance(1)
+ ln--
+ if len(st.str) < ln {
+ st.fail("not enough characters for java resource length")
+ }
+ str := st.str[:ln]
+ final := ""
+ st.advance(ln)
+ for i := 0; i < len(str); i++ {
+ if str[i] != '$' {
+ final += string(str[i])
+ } else {
+ if len(str) <= i+1 {
+ st.failEarlier("java resource escape at end of string", 1)
+ }
+ i++
+ r, ok := map[byte]string{
+ 'S': "/",
+ '_': ".",
+ '$': "$",
+ }[str[i]]
+ if !ok {
+ st.failEarlier("unrecognized java resource escape", ln-i-1)
+ }
+ final += r
+ }
+ }
+ return &Special{Prefix: "java resource ", Val: &Name{Name: final}}
+}
+
// specialName parses:
//
//	<special-name> ::= TV <type>
//	               ::= TT <type>
//	               ::= TI <type>
//	               ::= TS <type>
//	               ::= TA <template-arg>
//	               ::= GV <(object) name>
//	               ::= T <call-offset> <(base) encoding>
//	               ::= Tc <call-offset> <call-offset> <(base) encoding>
//	g++ extensions:
//	               ::= TC <type> <(offset) number> _ <(base) type>
//	               ::= TF <type>
//	               ::= TJ <type>
//	               ::= GR <name>
//	               ::= GA <encoding>
//	               ::= Gr <resource name>
//	               ::= GTt <encoding>
//	               ::= GTn <encoding>
//
// On entry the first character is known to be 'T' or 'G'.
func (st *state) specialName() AST {
	if st.str[0] == 'T' {
		st.advance(1)
		if len(st.str) == 0 {
			st.fail("expected special name code")
		}
		c := st.str[0]
		st.advance(1)
		switch c {
		case 'V':
			t := st.demangleType(false)
			return &Special{Prefix: "vtable for ", Val: t}
		case 'T':
			t := st.demangleType(false)
			return &Special{Prefix: "VTT for ", Val: t}
		case 'I':
			t := st.demangleType(false)
			return &Special{Prefix: "typeinfo for ", Val: t}
		case 'S':
			t := st.demangleType(false)
			return &Special{Prefix: "typeinfo name for ", Val: t}
		case 'A':
			t := st.templateArg()
			return &Special{Prefix: "template parameter object for ", Val: t}
		case 'h':
			st.callOffset('h')
			v := st.encoding(true, notForLocalName)
			return &Special{Prefix: "non-virtual thunk to ", Val: v}
		case 'v':
			st.callOffset('v')
			v := st.encoding(true, notForLocalName)
			return &Special{Prefix: "virtual thunk to ", Val: v}
		case 'c':
			// Two call offsets: this adjustment and result adjustment.
			st.callOffset(0)
			st.callOffset(0)
			v := st.encoding(true, notForLocalName)
			return &Special{Prefix: "covariant return thunk to ", Val: v}
		case 'C':
			derived := st.demangleType(false)
			off := st.off
			offset := st.number()
			if offset < 0 {
				st.failEarlier("expected positive offset", st.off-off)
			}
			if len(st.str) == 0 || st.str[0] != '_' {
				st.fail("expected _ after number")
			}
			st.advance(1)
			base := st.demangleType(false)
			return &Special2{Prefix: "construction vtable for ", Val1: base, Middle: "-in-", Val2: derived}
		case 'F':
			t := st.demangleType(false)
			return &Special{Prefix: "typeinfo fn for ", Val: t}
		case 'J':
			t := st.demangleType(false)
			return &Special{Prefix: "java Class for ", Val: t}
		case 'H':
			n := st.name()
			return &Special{Prefix: "TLS init function for ", Val: n}
		case 'W':
			n := st.name()
			return &Special{Prefix: "TLS wrapper function for ", Val: n}
		default:
			st.fail("unrecognized special T name code")
			panic("not reached")
		}
	} else {
		st.checkChar('G')
		if len(st.str) == 0 {
			st.fail("expected special name code")
		}
		c := st.str[0]
		st.advance(1)
		switch c {
		case 'V':
			n := st.name()
			return &Special{Prefix: "guard variable for ", Val: n}
		case 'R':
			n := st.name()
			st.seqID(true)
			return &Special{Prefix: "reference temporary for ", Val: n}
		case 'A':
			v := st.encoding(true, notForLocalName)
			return &Special{Prefix: "hidden alias for ", Val: v}
		case 'T':
			if len(st.str) == 0 {
				st.fail("expected special GT name code")
			}
			c := st.str[0]
			st.advance(1)
			v := st.encoding(true, notForLocalName)
			switch c {
			case 'n':
				return &Special{Prefix: "non-transaction clone for ", Val: v}
			default:
				// The proposal is that different
				// letters stand for different types
				// of transactional cloning. Treat
				// them all the same for now.
				fallthrough
			case 't':
				return &Special{Prefix: "transaction clone for ", Val: v}
			}
		case 'r':
			return st.javaResource()
		default:
			st.fail("unrecognized special G name code")
			panic("not reached")
		}
	}
}
+
// callOffset parses:
//
//	<call-offset> ::= h <nv-offset> _
//	              ::= v <v-offset> _
//
//	<nv-offset> ::= <(offset) number>
//
//	<v-offset> ::= <(offset) number> _ <(virtual offset) number>
//
// The c parameter, if not 0, is a character we just read which is the
// start of the <call-offset>.
//
// We don't display the offset information anywhere.
func (st *state) callOffset(c byte) {
	if c == 0 {
		if len(st.str) == 0 {
			st.fail("missing call offset")
		}
		c = st.str[0]
		st.advance(1)
	}
	switch c {
	case 'h':
		st.number()
	case 'v':
		st.number()
		if len(st.str) == 0 || st.str[0] != '_' {
			st.fail("expected _ after number")
		}
		st.advance(1)
		st.number()
	default:
		st.failEarlier("unrecognized call offset code", 1)
	}
	if len(st.str) == 0 || st.str[0] != '_' {
		st.fail("expected _ after call offset")
	}
	st.advance(1)
}
+
// builtinTypes maps the one-letter <builtin-type> code to the type name.
var builtinTypes = map[byte]string{
	'a': "signed char",
	'b': "bool",
	'c': "char",
	'd': "double",
	'e': "long double",
	'f': "float",
	'g': "__float128",
	'h': "unsigned char",
	'i': "int",
	'j': "unsigned int",
	'l': "long",
	'm': "unsigned long",
	'n': "__int128",
	'o': "unsigned __int128",
	's': "short",
	't': "unsigned short",
	'v': "void",
	'w': "wchar_t",
	'x': "long long",
	'y': "unsigned long long",
	'z': "...",
}
+
// demangleType parses:
//
//	<type> ::= <builtin-type>
//	       ::= <function-type>
//	       ::= <class-enum-type>
//	       ::= <array-type>
//	       ::= <pointer-to-member-type>
//	       ::= <template-param>
//	       ::= <template-template-param> <template-args>
//	       ::= <substitution>
//	       ::= <CV-qualifiers> <type>
//	       ::= P <type>
//	       ::= R <type>
//	       ::= O <type> (C++0x)
//	       ::= C <type>
//	       ::= G <type>
//	       ::= U <source-name> <type>
//
//	<builtin-type> ::= various one letter codes
//	               ::= u <source-name>
//
// isCast indicates that we are parsing the target type of a cast
// operator, which changes how a template-param followed by
// template-args is resolved (see demangleCastTemplateArgs).
func (st *state) demangleType(isCast bool) AST {
	if len(st.str) == 0 {
		st.fail("expected type")
	}

	addSubst := true

	q := st.cvQualifiers()
	if q != nil {
		if len(st.str) == 0 {
			st.fail("expected type")
		}

		// CV-qualifiers before a function type apply to
		// 'this', so avoid adding the unqualified function
		// type to the substitution list.
		if st.str[0] == 'F' {
			addSubst = false
		}
	}

	var ret AST

	// Use correct substitution for a template parameter.
	// NOTE(review): sub is never assigned in this function, so the
	// st.subs.add(ret) branch below is always the one taken.
	var sub AST

	if btype, ok := builtinTypes[st.str[0]]; ok {
		ret = &BuiltinType{Name: btype}
		st.advance(1)
		if q != nil {
			ret = &TypeWithQualifiers{Base: ret, Qualifiers: q}
			st.subs.add(ret)
		}
		return ret
	}
	c := st.str[0]
	switch c {
	case 'u':
		// Vendor extended builtin type.
		st.advance(1)
		ret = st.sourceName()
	case 'F':
		ret = st.functionType()
	case 'N', 'Z', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
		ret = st.name()
	case 'A':
		ret = st.arrayType(isCast)
	case 'M':
		ret = st.pointerToMemberType(isCast)
	case 'T':
		// "Ts"/"Tu"/"Te" are elaborated type specifiers.
		if len(st.str) > 1 && (st.str[1] == 's' || st.str[1] == 'u' || st.str[1] == 'e') {
			c = st.str[1]
			st.advance(2)
			ret = st.name()
			var kind string
			switch c {
			case 's':
				kind = "struct"
			case 'u':
				kind = "union"
			case 'e':
				kind = "enum"
			}
			ret = &ElaboratedType{Kind: kind, Type: ret}
			break
		}

		ret = st.templateParam()
		if len(st.str) > 0 && st.str[0] == 'I' {
			// See the function comment to explain this.
			if !isCast {
				st.subs.add(ret)
				args := st.templateArgs()
				ret = &Template{Name: ret, Args: args}
			} else {
				ret = st.demangleCastTemplateArgs(ret, true)
			}
		}
	case 'S':
		// If this is a special substitution, then it
		// is the start of <class-enum-type>.
		var c2 byte
		if len(st.str) > 1 {
			c2 = st.str[1]
		}
		if isDigit(c2) || c2 == '_' || isUpper(c2) {
			ret = st.substitution(false)
			if len(st.str) == 0 || st.str[0] != 'I' {
				addSubst = false
			} else {
				// See the function comment to explain this.
				if _, ok := ret.(*TemplateParam); !ok || !isCast {
					args := st.templateArgs()
					ret = &Template{Name: ret, Args: args}
				} else {
					next := st.demangleCastTemplateArgs(ret, false)
					if next == ret {
						addSubst = false
					}
					ret = next
				}
			}
		} else {
			ret = st.name()
			// This substitution is not itself a
			// substitution candidate, unless template
			// arguments were added.
			if ret == subAST[c2] || ret == verboseAST[c2] {
				addSubst = false
			}
		}
	case 'O', 'P', 'R', 'C', 'G':
		st.advance(1)
		t := st.demangleType(isCast)
		switch c {
		case 'O':
			ret = &RvalueReferenceType{Base: t}
		case 'P':
			ret = &PointerType{Base: t}
		case 'R':
			ret = &ReferenceType{Base: t}
		case 'C':
			ret = &ComplexType{Base: t}
		case 'G':
			ret = &ImaginaryType{Base: t}
		}
	case 'U':
		if len(st.str) < 2 {
			st.fail("expected source name or unnamed type")
		}
		switch st.str[1] {
		case 'l':
			ret = st.closureTypeName()
			addSubst = false
		case 't':
			ret = st.unnamedTypeName()
			addSubst = false
		default:
			// Vendor extended type qualifier.
			st.advance(1)
			n := st.sourceName()
			if len(st.str) > 0 && st.str[0] == 'I' {
				args := st.templateArgs()
				n = &Template{Name: n, Args: args}
			}
			t := st.demangleType(isCast)
			ret = &VendorQualifier{Qualifier: n, Type: t}
		}
	case 'D':
		st.advance(1)
		if len(st.str) == 0 {
			st.fail("expected D code for type")
		}
		addSubst = false
		c2 := st.str[0]
		st.advance(1)
		switch c2 {
		case 'T', 't':
			// decltype(expression)
			ret = st.expression()
			if len(st.str) == 0 || st.str[0] != 'E' {
				st.fail("expected E after expression in type")
			}
			st.advance(1)
			ret = &Decltype{Expr: ret}
			addSubst = true

		case 'p':
			// Pack expansion of a type.
			t := st.demangleType(isCast)
			pack := st.findArgumentPack(t)
			ret = &PackExpansion{Base: t, Pack: pack}
			addSubst = true

		case 'a':
			ret = &Name{Name: "auto"}
		case 'c':
			ret = &Name{Name: "decltype(auto)"}

		case 'f':
			ret = &BuiltinType{Name: "decimal32"}
		case 'd':
			ret = &BuiltinType{Name: "decimal64"}
		case 'e':
			ret = &BuiltinType{Name: "decimal128"}
		case 'h':
			ret = &BuiltinType{Name: "half"}
		case 'u':
			ret = &BuiltinType{Name: "char8_t"}
		case 's':
			ret = &BuiltinType{Name: "char16_t"}
		case 'i':
			ret = &BuiltinType{Name: "char32_t"}
		case 'n':
			ret = &BuiltinType{Name: "decltype(nullptr)"}

		case 'F':
			// Fixed-point or binary floating-point type.
			accum := false
			bits := 0
			if len(st.str) > 0 && isDigit(st.str[0]) {
				accum = true
				bits = st.number()
			}
			if len(st.str) > 0 && st.str[0] == '_' {
				if bits == 0 {
					st.fail("expected non-zero number of bits")
				}
				st.advance(1)
				ret = &BinaryFP{Bits: bits}
			} else {
				base := st.demangleType(isCast)
				if len(st.str) > 0 && isDigit(st.str[0]) {
					// We don't care about the bits.
					st.number()
				}
				sat := false
				if len(st.str) > 0 {
					if st.str[0] == 's' {
						sat = true
					}
					st.advance(1)
				}
				ret = &FixedType{Base: base, Accum: accum, Sat: sat}
			}

		case 'v':
			ret = st.vectorType(isCast)
			addSubst = true

		default:
			st.fail("unrecognized D code in type")
		}

	default:
		st.fail("unrecognized type code")
	}

	if addSubst {
		if sub != nil {
			st.subs.add(sub)
		} else {
			st.subs.add(ret)
		}
	}

	if q != nil {
		if _, ok := ret.(*FunctionType); ok {
			ret = &MethodWithQualifiers{Method: ret, Qualifiers: q, RefQualifier: ""}
		} else if mwq, ok := ret.(*MethodWithQualifiers); ok {
			// Merge adjacent qualifiers. This case
			// happens with a function with a trailing
			// ref-qualifier.
			mwq.Qualifiers = mergeQualifiers(q, mwq.Qualifiers)
		} else {
			// Merge adjacent qualifiers. This case
			// happens with multi-dimensional array types.
			if qsub, ok := ret.(*TypeWithQualifiers); ok {
				q = mergeQualifiers(q, qsub.Qualifiers)
				ret = qsub.Base
			}
			ret = &TypeWithQualifiers{Base: ret, Qualifiers: q}
		}
		st.subs.add(ret)
	}

	return ret
}
+
+// demangleCastTemplateArgs is for a rather hideous parse. When we
+// see a template-param followed by a template-args, we need to decide
+// whether we have a template-param or a template-template-param.
+// Normally it is template-template-param, meaning that we pick up the
+// template arguments here. But, if we are parsing the type for a
+// cast operator, then the only way this can be template-template-param
+// is if there is another set of template-args immediately after this
+// set. That would look like this:
+//
+// <nested-name>
+// -> <template-prefix> <template-args>
+// -> <prefix> <template-unqualified-name> <template-args>
+// -> <unqualified-name> <template-unqualified-name> <template-args>
+// -> <source-name> <template-unqualified-name> <template-args>
+// -> <source-name> <operator-name> <template-args>
+// -> <source-name> cv <type> <template-args>
+// -> <source-name> cv <template-template-param> <template-args> <template-args>
+//
+// Otherwise, we have this derivation:
+//
+// <nested-name>
+// -> <template-prefix> <template-args>
+// -> <prefix> <template-unqualified-name> <template-args>
+// -> <unqualified-name> <template-unqualified-name> <template-args>
+// -> <source-name> <template-unqualified-name> <template-args>
+// -> <source-name> <operator-name> <template-args>
+// -> <source-name> cv <type> <template-args>
+// -> <source-name> cv <template-param> <template-args>
+//
+// in which the template-args are actually part of the prefix. For
+// the special case where this arises, demangleType is called with
+// isCast as true. This function is then responsible for checking
+// whether we see <template-param> <template-args> but there is not
+// another following <template-args>. In that case, we reset the
+// parse and just return the <template-param>.
+func (st *state) demangleCastTemplateArgs(tp AST, addSubst bool) AST {
+ save := st.copy()
+
+ var args []AST
+ failed := false
+ func() {
+ defer func() {
+ if r := recover(); r != nil {
+ if _, ok := r.(demangleErr); ok {
+ failed = true
+ } else {
+ panic(r)
+ }
+ }
+ }()
+
+ args = st.templateArgs()
+ }()
+
+ if !failed && len(st.str) > 0 && st.str[0] == 'I' {
+ if addSubst {
+ st.subs.add(tp)
+ }
+ return &Template{Name: tp, Args: args}
+ }
+ // Reset back to before we started reading the template arguments.
+ // They will be read again by st.prefix.
+ *st = *save
+ return tp
+}
+
+// mergeQualifiers merges two qualifier lists into one.
+func mergeQualifiers(q1AST, q2AST AST) AST {
+ if q1AST == nil {
+ return q2AST
+ }
+ if q2AST == nil {
+ return q1AST
+ }
+ q1 := q1AST.(*Qualifiers)
+ m := make(map[string]bool)
+ for _, qualAST := range q1.Qualifiers {
+ qual := qualAST.(*Qualifier)
+ if len(qual.Exprs) == 0 {
+ m[qual.Name] = true
+ }
+ }
+ rq := q1.Qualifiers
+ for _, qualAST := range q2AST.(*Qualifiers).Qualifiers {
+ qual := qualAST.(*Qualifier)
+ if len(qual.Exprs) > 0 {
+ rq = append(rq, qualAST)
+ } else if !m[qual.Name] {
+ rq = append(rq, qualAST)
+ m[qual.Name] = true
+ }
+ }
+ q1.Qualifiers = rq
+ return q1
+}
+
// qualifiers maps each CV-qualifier code that can appear in a mangled
// name to the string printed in the demangled output.
var qualifiers = map[byte]string{
	'K': "const",
	'V': "volatile",
	'r': "restrict",
}
+
+// cvQualifiers parses:
+//
+// <CV-qualifiers> ::= [r] [V] [K]
+func (st *state) cvQualifiers() AST {
+ var q []AST
+qualLoop:
+ for len(st.str) > 0 {
+ if qv, ok := qualifiers[st.str[0]]; ok {
+ qual := &Qualifier{Name: qv}
+ q = append([]AST{qual}, q...)
+ st.advance(1)
+ } else if len(st.str) > 1 && st.str[0] == 'D' {
+ var qual AST
+ switch st.str[1] {
+ case 'x':
+ qual = &Qualifier{Name: "transaction_safe"}
+ st.advance(2)
+ case 'o':
+ qual = &Qualifier{Name: "noexcept"}
+ st.advance(2)
+ case 'O':
+ st.advance(2)
+ expr := st.expression()
+ if len(st.str) == 0 || st.str[0] != 'E' {
+ st.fail("expected E after computed noexcept expression")
+ }
+ st.advance(1)
+ qual = &Qualifier{Name: "noexcept", Exprs: []AST{expr}}
+ case 'w':
+ st.advance(2)
+ parmlist := st.parmlist()
+ if len(st.str) == 0 || st.str[0] != 'E' {
+ st.fail("expected E after throw parameter list")
+ }
+ st.advance(1)
+ qual = &Qualifier{Name: "throw", Exprs: parmlist}
+ default:
+ break qualLoop
+ }
+ q = append([]AST{qual}, q...)
+ } else {
+ break
+ }
+ }
+ if len(q) == 0 {
+ return nil
+ }
+ return &Qualifiers{Qualifiers: q}
+}
+
+// refQualifier parses:
+//
+// <ref-qualifier> ::= R
+// ::= O
+func (st *state) refQualifier() string {
+ if len(st.str) > 0 {
+ switch st.str[0] {
+ case 'R':
+ st.advance(1)
+ return "&"
+ case 'O':
+ st.advance(1)
+ return "&&"
+ }
+ }
+ return ""
+}
+
+// parmlist parses:
+//
+// <type>+
+func (st *state) parmlist() []AST {
+ var ret []AST
+ for {
+ if len(st.str) < 1 {
+ break
+ }
+ if st.str[0] == 'E' || st.str[0] == '.' {
+ break
+ }
+ if (st.str[0] == 'R' || st.str[0] == 'O') && len(st.str) > 1 && st.str[1] == 'E' {
+ // This is a function ref-qualifier.
+ break
+ }
+ ptype := st.demangleType(false)
+ ret = append(ret, ptype)
+ }
+
+ // There should always be at least one type. A function that
+ // takes no arguments will have a single parameter type
+ // "void".
+ if len(ret) == 0 {
+ st.fail("expected at least one type in type list")
+ }
+
+ // Omit a single parameter type void.
+ if len(ret) == 1 {
+ if bt, ok := ret[0].(*BuiltinType); ok && bt.Name == "void" {
+ ret = nil
+ }
+ }
+
+ return ret
+}
+
+// functionType parses:
+//
+// <function-type> ::= F [Y] <bare-function-type> [<ref-qualifier>] E
+func (st *state) functionType() AST {
+ st.checkChar('F')
+ if len(st.str) > 0 && st.str[0] == 'Y' {
+ // Function has C linkage. We don't print this.
+ st.advance(1)
+ }
+ ret := st.bareFunctionType(true)
+ r := st.refQualifier()
+ if r != "" {
+ ret = &MethodWithQualifiers{Method: ret, Qualifiers: nil, RefQualifier: r}
+ }
+ if len(st.str) == 0 || st.str[0] != 'E' {
+ st.fail("expected E after function type")
+ }
+ st.advance(1)
+ return ret
+}
+
+// bareFunctionType parses:
+//
+// <bare-function-type> ::= [J]<type>+
+func (st *state) bareFunctionType(hasReturnType bool) AST {
+ if len(st.str) > 0 && st.str[0] == 'J' {
+ hasReturnType = true
+ st.advance(1)
+ }
+ var returnType AST
+ if hasReturnType {
+ returnType = st.demangleType(false)
+ }
+ types := st.parmlist()
+ return &FunctionType{
+ Return: returnType,
+ Args: types,
+ ForLocalName: false, // may be set later in encoding
+ }
+}
+
+// arrayType parses:
+//
+// <array-type> ::= A <(positive dimension) number> _ <(element) type>
+// ::= A [<(dimension) expression>] _ <(element) type>
+func (st *state) arrayType(isCast bool) AST {
+ st.checkChar('A')
+
+ if len(st.str) == 0 {
+ st.fail("missing array dimension")
+ }
+
+ var dim AST
+ if st.str[0] == '_' {
+ dim = &Name{Name: ""}
+ } else if isDigit(st.str[0]) {
+ i := 1
+ for len(st.str) > i && isDigit(st.str[i]) {
+ i++
+ }
+ dim = &Name{Name: st.str[:i]}
+ st.advance(i)
+ } else {
+ dim = st.expression()
+ }
+
+ if len(st.str) == 0 || st.str[0] != '_' {
+ st.fail("expected _ after dimension")
+ }
+ st.advance(1)
+
+ t := st.demangleType(isCast)
+
+ arr := &ArrayType{Dimension: dim, Element: t}
+
+ // Qualifiers on the element of an array type go on the whole
+ // array type.
+ if q, ok := arr.Element.(*TypeWithQualifiers); ok {
+ return &TypeWithQualifiers{Base: &ArrayType{Dimension: dim, Element: q.Base}, Qualifiers: q.Qualifiers}
+ }
+
+ return arr
+}
+
+// vectorType parses:
+//
+// <vector-type> ::= Dv <number> _ <type>
+// ::= Dv _ <expression> _ <type>
+func (st *state) vectorType(isCast bool) AST {
+ if len(st.str) == 0 {
+ st.fail("expected vector dimension")
+ }
+
+ var dim AST
+ if st.str[0] == '_' {
+ st.advance(1)
+ dim = st.expression()
+ } else {
+ num := st.number()
+ dim = &Name{Name: fmt.Sprintf("%d", num)}
+ }
+
+ if len(st.str) == 0 || st.str[0] != '_' {
+ st.fail("expected _ after vector dimension")
+ }
+ st.advance(1)
+
+ t := st.demangleType(isCast)
+
+ return &VectorType{Dimension: dim, Base: t}
+}
+
+// pointerToMemberType parses:
+//
+// <pointer-to-member-type> ::= M <(class) type> <(member) type>
+func (st *state) pointerToMemberType(isCast bool) AST {
+ st.checkChar('M')
+ cl := st.demangleType(false)
+
+ // The ABI says, "The type of a non-static member function is
+ // considered to be different, for the purposes of
+ // substitution, from the type of a namespace-scope or static
+ // member function whose type appears similar. The types of
+ // two non-static member functions are considered to be
+ // different, for the purposes of substitution, if the
+ // functions are members of different classes. In other words,
+ // for the purposes of substitution, the class of which the
+ // function is a member is considered part of the type of
+ // function."
+ //
+ // For a pointer to member function, this call to demangleType
+ // will end up adding a (possibly qualified) non-member
+ // function type to the substitution table, which is not
+ // correct; however, the member function type will never be
+ // used in a substitution, so putting the wrong type in the
+ // substitution table is harmless.
+ mem := st.demangleType(isCast)
+ return &PtrMem{Class: cl, Member: mem}
+}
+
+// compactNumber parses:
+//
+// <non-negative number> _
+func (st *state) compactNumber() int {
+ if len(st.str) == 0 {
+ st.fail("missing index")
+ }
+ if st.str[0] == '_' {
+ st.advance(1)
+ return 0
+ } else if st.str[0] == 'n' {
+ st.fail("unexpected negative number")
+ }
+ n := st.number()
+ if len(st.str) == 0 || st.str[0] != '_' {
+ st.fail("missing underscore after number")
+ }
+ st.advance(1)
+ return n + 1
+}
+
+// templateParam parses:
+//
+// <template-param> ::= T_
+// ::= T <(parameter-2 non-negative) number> _
+// ::= TL <level-1> __
+// ::= TL <level-1> _ <parameter-2 non-negative number> _
+//
+// When a template parameter is a substitution candidate, any
+// reference to that substitution refers to the template parameter
+// with the same index in the currently active template, not to
+// whatever the template parameter would be expanded to here. We sort
+// this out in substitution and simplify.
+func (st *state) templateParam() AST {
+ off := st.off
+ st.checkChar('T')
+
+ level := 0
+ if len(st.str) > 0 && st.str[0] == 'L' {
+ st.advance(1)
+ level = st.compactNumber()
+ }
+
+ n := st.compactNumber()
+
+ if level >= len(st.templates) {
+ if st.lambdaTemplateLevel > 0 && level == st.lambdaTemplateLevel-1 {
+ // Lambda auto params are mangled as template params.
+ // See https://gcc.gnu.org/PR78252.
+ return &LambdaAuto{Index: n}
+ }
+ st.failEarlier(fmt.Sprintf("template parameter is not in scope of template (level %d >= %d)", level, len(st.templates)), st.off-off)
+ }
+
+ template := st.templates[level]
+
+ if template == nil {
+ // We are parsing a cast operator. If the cast is
+ // itself a template, then this is a forward
+ // reference. Fill it in later.
+ return &TemplateParam{Index: n, Template: nil}
+ }
+
+ if n >= len(template.Args) {
+ if st.lambdaTemplateLevel > 0 && level == st.lambdaTemplateLevel-1 {
+ // Lambda auto params are mangled as template params.
+ // See https://gcc.gnu.org/PR78252.
+ return &LambdaAuto{Index: n}
+ }
+ st.failEarlier(fmt.Sprintf("template index out of range (%d >= %d)", n, len(template.Args)), st.off-off)
+ }
+
+ return &TemplateParam{Index: n, Template: template}
+}
+
+// setTemplate sets the Template field of any TemplateParam's in a.
+// This handles the forward referencing template parameters found in
+// cast operators.
+func (st *state) setTemplate(a AST, tmpl *Template) {
+ var seen []AST
+ a.Traverse(func(a AST) bool {
+ switch a := a.(type) {
+ case *TemplateParam:
+ if a.Template != nil {
+ if tmpl != nil {
+ st.fail("duplicate template parameters")
+ }
+ return false
+ }
+ if tmpl == nil {
+ st.fail("cast template parameter not in scope of template")
+ }
+ if a.Index >= len(tmpl.Args) {
+ st.fail(fmt.Sprintf("cast template index out of range (%d >= %d)", a.Index, len(tmpl.Args)))
+ }
+ a.Template = tmpl
+ return false
+ case *Closure:
+ // There are no template params in closure types.
+ // https://gcc.gnu.org/PR78252.
+ return false
+ default:
+ for _, v := range seen {
+ if v == a {
+ return false
+ }
+ }
+ seen = append(seen, a)
+ return true
+ }
+ })
+}
+
+// clearTemplateArgs gives an error for any unset Template field in
+// args. This handles erroneous cases where a cast operator with a
+// forward referenced template is in the scope of another cast
+// operator.
+func (st *state) clearTemplateArgs(args []AST) {
+ for _, a := range args {
+ st.setTemplate(a, nil)
+ }
+}
+
+// templateArgs parses:
+//
+// <template-args> ::= I <template-arg>+ E
+func (st *state) templateArgs() []AST {
+ if len(st.str) == 0 || (st.str[0] != 'I' && st.str[0] != 'J') {
+ panic("internal error")
+ }
+ st.advance(1)
+
+ var ret []AST
+ for len(st.str) == 0 || st.str[0] != 'E' {
+ arg := st.templateArg()
+ ret = append(ret, arg)
+ }
+ st.advance(1)
+ return ret
+}
+
+// templateArg parses:
+//
+// <template-arg> ::= <type>
+// ::= X <expression> E
+// ::= <expr-primary>
+func (st *state) templateArg() AST {
+ if len(st.str) == 0 {
+ st.fail("missing template argument")
+ }
+ switch st.str[0] {
+ case 'X':
+ st.advance(1)
+ expr := st.expression()
+ if len(st.str) == 0 || st.str[0] != 'E' {
+ st.fail("missing end of expression")
+ }
+ st.advance(1)
+ return expr
+
+ case 'L':
+ return st.exprPrimary()
+
+ case 'I', 'J':
+ args := st.templateArgs()
+ return &ArgumentPack{Args: args}
+
+ default:
+ return st.demangleType(false)
+ }
+}
+
+// exprList parses a sequence of expressions up to a terminating character.
+func (st *state) exprList(stop byte) AST {
+ if len(st.str) > 0 && st.str[0] == stop {
+ st.advance(1)
+ return &ExprList{Exprs: nil}
+ }
+
+ var exprs []AST
+ for {
+ e := st.expression()
+ exprs = append(exprs, e)
+ if len(st.str) > 0 && st.str[0] == stop {
+ st.advance(1)
+ break
+ }
+ }
+ return &ExprList{Exprs: exprs}
+}
+
+// expression parses:
+//
+// <expression> ::= <(unary) operator-name> <expression>
+// ::= <(binary) operator-name> <expression> <expression>
+// ::= <(trinary) operator-name> <expression> <expression> <expression>
+// ::= pp_ <expression>
+// ::= mm_ <expression>
+// ::= cl <expression>+ E
+// ::= cl <expression>+ E
+// ::= cv <type> <expression>
+// ::= cv <type> _ <expression>* E
+// ::= tl <type> <braced-expression>* E
+// ::= il <braced-expression>* E
+// ::= [gs] nw <expression>* _ <type> E
+// ::= [gs] nw <expression>* _ <type> <initializer>
+// ::= [gs] na <expression>* _ <type> E
+// ::= [gs] na <expression>* _ <type> <initializer>
+// ::= [gs] dl <expression>
+// ::= [gs] da <expression>
+// ::= dc <type> <expression>
+// ::= sc <type> <expression>
+// ::= cc <type> <expression>
+// ::= mc <parameter type> <expr> [<offset number>] E
+// ::= rc <type> <expression>
+// ::= ti <type>
+// ::= te <expression>
+// ::= so <referent type> <expr> [<offset number>] <union-selector>* [p] E
+// ::= st <type>
+// ::= sz <expression>
+// ::= at <type>
+// ::= az <expression>
+// ::= nx <expression>
+// ::= <template-param>
+// ::= <function-param>
+// ::= dt <expression> <unresolved-name>
+// ::= pt <expression> <unresolved-name>
+// ::= ds <expression> <expression>
+// ::= sZ <template-param>
+// ::= sZ <function-param>
+// ::= sP <template-arg>* E
+// ::= sp <expression>
+// ::= fl <binary operator-name> <expression>
+// ::= fr <binary operator-name> <expression>
+// ::= fL <binary operator-name> <expression> <expression>
+// ::= fR <binary operator-name> <expression> <expression>
+// ::= tw <expression>
+// ::= tr
+// ::= u <source-name> <template-arg>* E
+// ::= <unresolved-name>
+// ::= <expr-primary>
+//
+// <function-param> ::= fp <CV-qualifiers> _
+// ::= fp <CV-qualifiers> <number>
+// ::= fL <number> p <CV-qualifiers> _
+// ::= fL <number> p <CV-qualifiers> <number>
+// ::= fpT
+//
+// <braced-expression> ::= <expression>
+// ::= di <field source-name> <braced-expression>
+// ::= dx <index expression> <braced-expression>
+// ::= dX <range begin expression> <range end expression> <braced-expression>
+func (st *state) expression() AST {
+ if len(st.str) == 0 {
+ st.fail("expected expression")
+ }
+ if st.str[0] == 'L' {
+ return st.exprPrimary()
+ } else if st.str[0] == 'T' {
+ return st.templateParam()
+ } else if st.str[0] == 's' && len(st.str) > 1 && st.str[1] == 'o' {
+ st.advance(2)
+ return st.subobject()
+ } else if st.str[0] == 's' && len(st.str) > 1 && st.str[1] == 'r' {
+ return st.unresolvedName()
+ } else if st.str[0] == 's' && len(st.str) > 1 && st.str[1] == 'p' {
+ st.advance(2)
+ e := st.expression()
+ pack := st.findArgumentPack(e)
+ return &PackExpansion{Base: e, Pack: pack}
+ } else if st.str[0] == 's' && len(st.str) > 1 && st.str[1] == 'Z' {
+ st.advance(2)
+ off := st.off
+ e := st.expression()
+ ap := st.findArgumentPack(e)
+ if ap == nil {
+ st.failEarlier("missing argument pack", st.off-off)
+ }
+ return &SizeofPack{Pack: ap}
+ } else if st.str[0] == 's' && len(st.str) > 1 && st.str[1] == 'P' {
+ st.advance(2)
+ var args []AST
+ for len(st.str) == 0 || st.str[0] != 'E' {
+ arg := st.templateArg()
+ args = append(args, arg)
+ }
+ st.advance(1)
+ return &SizeofArgs{Args: args}
+ } else if st.str[0] == 'f' && len(st.str) > 1 && st.str[1] == 'p' {
+ st.advance(2)
+ if len(st.str) > 0 && st.str[0] == 'T' {
+ st.advance(1)
+ return &FunctionParam{Index: 0}
+ } else {
+ // We can see qualifiers here, but we don't
+ // include them in the demangled string.
+ st.cvQualifiers()
+ index := st.compactNumber()
+ return &FunctionParam{Index: index + 1}
+ }
+ } else if st.str[0] == 'f' && len(st.str) > 2 && st.str[1] == 'L' && isDigit(st.str[2]) {
+ st.advance(2)
+ // We don't include the scope count in the demangled string.
+ st.number()
+ if len(st.str) == 0 || st.str[0] != 'p' {
+ st.fail("expected p after function parameter scope count")
+ }
+ st.advance(1)
+ // We can see qualifiers here, but we don't include them
+ // in the demangled string.
+ st.cvQualifiers()
+ index := st.compactNumber()
+ return &FunctionParam{Index: index + 1}
+ } else if st.str[0] == 'm' && len(st.str) > 1 && st.str[1] == 'c' {
+ st.advance(2)
+ typ := st.demangleType(false)
+ expr := st.expression()
+ offset := 0
+ if len(st.str) > 0 && (st.str[0] == 'n' || isDigit(st.str[0])) {
+ offset = st.number()
+ }
+ if len(st.str) == 0 || st.str[0] != 'E' {
+ st.fail("expected E after pointer-to-member conversion")
+ }
+ st.advance(1)
+ return &PtrMemCast{
+ Type: typ,
+ Expr: expr,
+ Offset: offset,
+ }
+ } else if isDigit(st.str[0]) || (st.str[0] == 'o' && len(st.str) > 1 && st.str[1] == 'n') {
+ if st.str[0] == 'o' {
+ // Skip operator function ID.
+ st.advance(2)
+ }
+ n, _ := st.unqualifiedName()
+ if len(st.str) > 0 && st.str[0] == 'I' {
+ args := st.templateArgs()
+ n = &Template{Name: n, Args: args}
+ }
+ return n
+ } else if (st.str[0] == 'i' || st.str[0] == 't') && len(st.str) > 1 && st.str[1] == 'l' {
+ // Brace-enclosed initializer list.
+ c := st.str[0]
+ st.advance(2)
+ var t AST
+ if c == 't' {
+ t = st.demangleType(false)
+ }
+ exprs := st.exprList('E')
+ return &InitializerList{Type: t, Exprs: exprs}
+ } else if st.str[0] == 's' && len(st.str) > 1 && st.str[1] == 't' {
+ o, _ := st.operatorName(true)
+ t := st.demangleType(false)
+ return &Unary{Op: o, Expr: t, Suffix: false, SizeofType: true}
+ } else if st.str[0] == 'u' {
+ st.advance(1)
+ name := st.sourceName()
+ // Special case __uuidof followed by type or
+ // expression, as used by LLVM.
+ if n, ok := name.(*Name); ok && n.Name == "__uuidof" {
+ if len(st.str) < 2 {
+ st.fail("missing uuidof argument")
+ }
+ var operand AST
+ if st.str[0] == 't' {
+ st.advance(1)
+ operand = st.demangleType(false)
+ } else if st.str[0] == 'z' {
+ st.advance(1)
+ operand = st.expression()
+ }
+ if operand != nil {
+ return &Binary{
+ Op: &Operator{Name: "()"},
+ Left: name,
+ Right: &ExprList{
+ Exprs: []AST{operand},
+ },
+ }
+ }
+ }
+ var args []AST
+ for {
+ if len(st.str) == 0 {
+ st.fail("missing argument in vendor extended expressoin")
+ }
+ if st.str[0] == 'E' {
+ st.advance(1)
+ break
+ }
+ arg := st.templateArg()
+ args = append(args, arg)
+ }
+ return &Binary{
+ Op: &Operator{Name: "()"},
+ Left: name,
+ Right: &ExprList{Exprs: args},
+ }
+ } else {
+ if len(st.str) < 2 {
+ st.fail("missing operator code")
+ }
+ code := st.str[:2]
+ o, args := st.operatorName(true)
+ switch args {
+ case 0:
+ return &Nullary{Op: o}
+
+ case 1:
+ suffix := false
+ if code == "pp" || code == "mm" {
+ if len(st.str) > 0 && st.str[0] == '_' {
+ st.advance(1)
+ } else {
+ suffix = true
+ }
+ }
+ var operand AST
+ if _, ok := o.(*Cast); ok && len(st.str) > 0 && st.str[0] == '_' {
+ st.advance(1)
+ operand = st.exprList('E')
+ } else {
+ operand = st.expression()
+ }
+ return &Unary{Op: o, Expr: operand, Suffix: suffix, SizeofType: false}
+
+ case 2:
+ var left, right AST
+ if code == "sc" || code == "dc" || code == "cc" || code == "rc" {
+ left = st.demangleType(false)
+ } else if code[0] == 'f' {
+ left, _ = st.operatorName(true)
+ right = st.expression()
+ return &Fold{Left: code[1] == 'l', Op: left, Arg1: right, Arg2: nil}
+ } else if code == "di" {
+ left, _ = st.unqualifiedName()
+ } else {
+ left = st.expression()
+ }
+ if code == "cl" || code == "cp" {
+ right = st.exprList('E')
+ } else if code == "dt" || code == "pt" {
+ right = st.unresolvedName()
+ if len(st.str) > 0 && st.str[0] == 'I' {
+ args := st.templateArgs()
+ right = &Template{Name: right, Args: args}
+ }
+ } else {
+ right = st.expression()
+ }
+ return &Binary{Op: o, Left: left, Right: right}
+
+ case 3:
+ if code[0] == 'n' {
+ if code[1] != 'w' && code[1] != 'a' {
+ panic("internal error")
+ }
+ place := st.exprList('_')
+ if place.(*ExprList).Exprs == nil {
+ place = nil
+ }
+ t := st.demangleType(false)
+ var ini AST
+ if len(st.str) > 0 && st.str[0] == 'E' {
+ st.advance(1)
+ } else if len(st.str) > 1 && st.str[0] == 'p' && st.str[1] == 'i' {
+ // Parenthesized initializer.
+ st.advance(2)
+ ini = st.exprList('E')
+ } else if len(st.str) > 1 && st.str[0] == 'i' && st.str[1] == 'l' {
+ // Initializer list.
+ ini = st.expression()
+ } else {
+ st.fail("unrecognized new initializer")
+ }
+ return &New{Op: o, Place: place, Type: t, Init: ini}
+ } else if code[0] == 'f' {
+ first, _ := st.operatorName(true)
+ second := st.expression()
+ third := st.expression()
+ return &Fold{Left: code[1] == 'L', Op: first, Arg1: second, Arg2: third}
+ } else {
+ first := st.expression()
+ second := st.expression()
+ third := st.expression()
+ return &Trinary{Op: o, First: first, Second: second, Third: third}
+ }
+
+ default:
+ st.fail(fmt.Sprintf("unsupported number of operator arguments: %d", args))
+ panic("not reached")
+ }
+ }
+}
+
+// subobject parses:
+//
+// <expression> ::= so <referent type> <expr> [<offset number>] <union-selector>* [p] E
+// <union-selector> ::= _ [<number>]
+func (st *state) subobject() AST {
+ typ := st.demangleType(false)
+ expr := st.expression()
+ offset := 0
+ if len(st.str) > 0 && (st.str[0] == 'n' || isDigit(st.str[0])) {
+ offset = st.number()
+ }
+ var selectors []int
+ for len(st.str) > 0 && st.str[0] == '_' {
+ st.advance(1)
+ selector := 0
+ if len(st.str) > 0 && (st.str[0] == 'n' || isDigit(st.str[0])) {
+ selector = st.number()
+ }
+ selectors = append(selectors, selector)
+ }
+ pastEnd := false
+ if len(st.str) > 0 && st.str[0] == 'p' {
+ st.advance(1)
+ pastEnd = true
+ }
+ if len(st.str) == 0 || st.str[0] != 'E' {
+ st.fail("expected E after subobject")
+ }
+ st.advance(1)
+ return &Subobject{
+ Type: typ,
+ SubExpr: expr,
+ Offset: offset,
+ Selectors: selectors,
+ PastEnd: pastEnd,
+ }
+}
+
// unresolvedName parses:
//
//	<unresolved-name> ::= [gs] <base-unresolved-name>
//	                  ::= sr <unresolved-type> <base-unresolved-name>
//	                  ::= srN <unresolved-type> <unresolved-qualifier-level>+ E <base-unresolved-name>
//	                  ::= [gs] sr <unresolved-qualifier-level>+ E <base-unresolved-name>
func (st *state) unresolvedName() AST {
	if len(st.str) >= 2 && st.str[:2] == "gs" {
		// "gs" marks global scope; demangled as a leading "::".
		st.advance(2)
		n := st.unresolvedName()
		return &Unary{
			Op:         &Operator{Name: "::"},
			Expr:       n,
			Suffix:     false,
			SizeofType: false,
		}
	} else if len(st.str) >= 2 && st.str[:2] == "sr" {
		st.advance(2)
		if len(st.str) == 0 {
			st.fail("expected unresolved type")
		}
		switch st.str[0] {
		case 'T', 'D', 'S':
			// sr <unresolved-type> <base-unresolved-name>:
			// the scope is a single type.
			t := st.demangleType(false)
			n := st.baseUnresolvedName()
			n = &Qualified{Scope: t, Name: n, LocalName: false}
			if len(st.str) > 0 && st.str[0] == 'I' {
				args := st.templateArgs()
				n = &Template{Name: n, Args: args}
				st.subs.add(n)
			}
			return n
		default:
			// The qualifier-level forms: an optional leading
			// type (after 'N'), then one or more qualifier
			// levels, terminated by 'E'.
			var s AST
			if st.str[0] == 'N' {
				st.advance(1)
				s = st.demangleType(false)
			}
			for len(st.str) == 0 || st.str[0] != 'E' {
				// GCC does not seem to follow the ABI here.
				// It can emit type/name without an 'E'.
				if s != nil && len(st.str) > 0 && !isDigit(st.str[0]) {
					if q, ok := s.(*Qualified); ok {
						a := q.Scope
						if t, ok := a.(*Template); ok {
							st.subs.add(t.Name)
							st.subs.add(t)
						} else {
							st.subs.add(a)
						}
						return s
					}
				}
				// Each qualifier level is a source name,
				// optionally with template arguments, and is
				// itself a substitution candidate.
				n := st.sourceName()
				if len(st.str) > 0 && st.str[0] == 'I' {
					st.subs.add(n)
					args := st.templateArgs()
					n = &Template{Name: n, Args: args}
				}
				if s == nil {
					s = n
				} else {
					s = &Qualified{Scope: s, Name: n, LocalName: false}
				}
				st.subs.add(s)
			}
			if s == nil {
				st.fail("missing scope in unresolved name")
			}
			st.advance(1)
			n := st.baseUnresolvedName()
			return &Qualified{Scope: s, Name: n, LocalName: false}
		}
	} else {
		return st.baseUnresolvedName()
	}
}
+
+// baseUnresolvedName parses:
+//
+// <base-unresolved-name> ::= <simple-id>
+// ::= on <operator-name>
+// ::= on <operator-name> <template-args>
+// ::= dn <destructor-name>
+//
+// <simple-id> ::= <source-name> [ <template-args> ]
+func (st *state) baseUnresolvedName() AST {
+ var n AST
+ if len(st.str) >= 2 && st.str[:2] == "on" {
+ st.advance(2)
+ n, _ = st.operatorName(true)
+ } else if len(st.str) >= 2 && st.str[:2] == "dn" {
+ st.advance(2)
+ if len(st.str) > 0 && isDigit(st.str[0]) {
+ n = st.sourceName()
+ } else {
+ n = st.demangleType(false)
+ }
+ n = &Destructor{Name: n}
+ } else if len(st.str) > 0 && isDigit(st.str[0]) {
+ n = st.sourceName()
+ } else {
+ // GCC seems to not follow the ABI here: it can have
+ // an operator name without on.
+ // See https://gcc.gnu.org/PR70182.
+ n, _ = st.operatorName(true)
+ }
+ if len(st.str) > 0 && st.str[0] == 'I' {
+ args := st.templateArgs()
+ n = &Template{Name: n, Args: args}
+ }
+ return n
+}
+
+// exprPrimary parses:
+//
+// <expr-primary> ::= L <type> <(value) number> E
+// ::= L <type> <(value) float> E
+// ::= L <mangled-name> E
+func (st *state) exprPrimary() AST {
+ st.checkChar('L')
+ if len(st.str) == 0 {
+ st.fail("expected primary expression")
+
+ }
+
+ // Check for 'Z' here because g++ incorrectly omitted the
+ // underscore until -fabi-version=3.
+ var ret AST
+ if st.str[0] == '_' || st.str[0] == 'Z' {
+ if st.str[0] == '_' {
+ st.advance(1)
+ }
+ if len(st.str) == 0 || st.str[0] != 'Z' {
+ st.fail("expected mangled name")
+ }
+ st.advance(1)
+ ret = st.encoding(true, notForLocalName)
+ } else {
+ t := st.demangleType(false)
+
+ isArrayType := func(typ AST) bool {
+ if twq, ok := typ.(*TypeWithQualifiers); ok {
+ typ = twq.Base
+ }
+ _, ok := typ.(*ArrayType)
+ return ok
+ }
+
+ neg := false
+ if len(st.str) > 0 && st.str[0] == 'n' {
+ neg = true
+ st.advance(1)
+ }
+ if len(st.str) > 0 && st.str[0] == 'E' {
+ if bt, ok := t.(*BuiltinType); ok && bt.Name == "decltype(nullptr)" {
+ // A nullptr should not have a value.
+ // We accept one if present because GCC
+ // used to generate one.
+ // https://gcc.gnu.org/PR91979.
+ } else if cl, ok := t.(*Closure); ok {
+ // A closure doesn't have a value.
+ st.advance(1)
+ return &LambdaExpr{Type: cl}
+ } else if isArrayType(t) {
+ st.advance(1)
+ return &StringLiteral{Type: t}
+ } else {
+ st.fail("missing literal value")
+ }
+ }
+ i := 0
+ for len(st.str) > i && st.str[i] != 'E' {
+ i++
+ }
+ val := st.str[:i]
+ st.advance(i)
+ ret = &Literal{Type: t, Val: val, Neg: neg}
+ }
+ if len(st.str) == 0 || st.str[0] != 'E' {
+ st.fail("expected E after literal")
+ }
+ st.advance(1)
+ return ret
+}
+
+// discriminator parses:
+//
+// <discriminator> ::= _ <(non-negative) number> (when number < 10)
+// __ <(non-negative) number> _ (when number >= 10)
+func (st *state) discriminator(a AST) AST {
+ if len(st.str) == 0 || st.str[0] != '_' {
+ // clang can generate a discriminator at the end of
+ // the string with no underscore.
+ for i := 0; i < len(st.str); i++ {
+ if !isDigit(st.str[i]) {
+ return a
+ }
+ }
+ // Skip the trailing digits.
+ st.advance(len(st.str))
+ return a
+ }
+ off := st.off
+ st.advance(1)
+ trailingUnderscore := false
+ if len(st.str) > 0 && st.str[0] == '_' {
+ st.advance(1)
+ trailingUnderscore = true
+ }
+ d := st.number()
+ if d < 0 {
+ st.failEarlier("invalid negative discriminator", st.off-off)
+ }
+ if trailingUnderscore && d >= 10 {
+ if len(st.str) == 0 || st.str[0] != '_' {
+ st.fail("expected _ after discriminator >= 10")
+ }
+ st.advance(1)
+ }
+ // We don't currently print out the discriminator, so we don't
+ // save it.
+ return a
+}
+
// closureTypeName parses:
//
//	<closure-type-name> ::= Ul <lambda-sig> E [ <nonnegative number> ] _
//	<lambda-sig> ::= <parameter type>+
func (st *state) closureTypeName() AST {
	st.checkChar('U')
	st.checkChar('l')

	// Template parameters of a generic lambda are mangled one level
	// beyond the currently known templates; record that level while
	// the signature is parsed, and restore it afterwards.
	oldLambdaTemplateLevel := st.lambdaTemplateLevel
	st.lambdaTemplateLevel = len(st.templates) + 1

	var templateArgs []AST
	var template *Template
	for len(st.str) > 1 && st.str[0] == 'T' {
		arg, templateVal := st.templateParamDecl()
		if arg == nil {
			break
		}
		templateArgs = append(templateArgs, arg)
		if template == nil {
			// Lazily create the template on the first parameter
			// declaration, and push it so that parameter
			// references resolve while parsing the rest of the
			// signature.
			template = &Template{
				Name: &Name{Name: "lambda"},
			}
			st.templates = append(st.templates, template)
		}
		template.Args = append(template.Args, templateVal)
	}

	types := st.parmlist()

	st.lambdaTemplateLevel = oldLambdaTemplateLevel

	// Pop the template pushed above, if any.
	if template != nil {
		st.templates = st.templates[:len(st.templates)-1]
	}

	if len(st.str) == 0 || st.str[0] != 'E' {
		st.fail("expected E after closure type name")
	}
	st.advance(1)
	// Trailing compact number distinguishes multiple lambdas in the
	// same scope.
	num := st.compactNumber()
	return &Closure{TemplateArgs: templateArgs, Types: types, Num: num}
}
+
// templateParamDecl parses:
//
//	<template-param-decl> ::= Ty                          # type parameter
//	                      ::= Tn <type>                   # non-type parameter
//	                      ::= Tt <template-param-decl>* E # template parameter
//	                      ::= Tp <template-param-decl>    # parameter pack
//
// Returns the new AST to include in the AST we are building and the
// new AST to add to the list of template parameters.
//
// Returns nil, nil if not looking at a template-param-decl.
func (st *state) templateParamDecl() (AST, AST) {
	if len(st.str) < 2 || st.str[0] != 'T' {
		return nil, nil
	}
	// mk builds a parameter name such as $T0, incrementing the
	// per-kind counter pointed to by p.
	mk := func(prefix string, p *int) AST {
		idx := *p
		(*p)++
		return &TemplateParamName{
			Prefix: prefix,
			Index:  idx,
		}
	}
	switch st.str[1] {
	case 'y':
		st.advance(2)
		name := mk("$T", &st.typeTemplateParamCount)
		tp := &TypeTemplateParam{
			Name: name,
		}
		return tp, name
	case 'n':
		st.advance(2)
		name := mk("$N", &st.nonTypeTemplateParamCount)
		typ := st.demangleType(false)
		tp := &NonTypeTemplateParam{
			Name: name,
			Type: typ,
		}
		return tp, name
	case 't':
		st.advance(2)
		name := mk("$TT", &st.templateTemplateParamCount)
		// Recursively parse the nested parameter declarations
		// up to the terminating E, tracking them in a
		// synthetic "template_template" template.
		var params []AST
		var template *Template
		for {
			if len(st.str) == 0 {
				st.fail("expected closure template parameter")
			}
			if st.str[0] == 'E' {
				st.advance(1)
				break
			}
			off := st.off
			param, templateVal := st.templateParamDecl()
			if param == nil {
				st.failEarlier("expected closure template parameter", st.off-off)
			}
			params = append(params, param)
			if template == nil {
				template = &Template{
					Name: &Name{Name: "template_template"},
				}
				st.templates = append(st.templates, template)
			}
			template.Args = append(template.Args, templateVal)
		}
		// Pop the synthetic template pushed above, if any.
		if template != nil {
			st.templates = st.templates[:len(st.templates)-1]
		}
		tp := &TemplateTemplateParam{
			Name:   name,
			Params: params,
		}
		return tp, name
	case 'p':
		st.advance(2)
		off := st.off
		param, templateVal := st.templateParamDecl()
		if param == nil {
			st.failEarlier("expected lambda template parameter", st.off-off)
		}
		return &TemplateParamPack{Param: param}, templateVal
	default:
		return nil, nil
	}
}
+
+// unnamedTypeName parses:
+//
+// <unnamed-type-name> ::= Ut [ <nonnegative number> ] _
+func (st *state) unnamedTypeName() AST {
+ st.checkChar('U')
+ st.checkChar('t')
+ num := st.compactNumber()
+ ret := &UnnamedType{Num: num}
+ st.subs.add(ret)
+ return ret
+}
+
+// Recognize a clone suffix. These are not part of the mangling API,
+// but are added by GCC when cloning functions.
+func (st *state) cloneSuffix(a AST) AST {
+ i := 0
+ if len(st.str) > 1 && st.str[0] == '.' && (isLower(st.str[1]) || isDigit(st.str[1]) || st.str[1] == '_') {
+ i += 2
+ for len(st.str) > i && (isLower(st.str[i]) || isDigit(st.str[i]) || st.str[i] == '_') {
+ i++
+ }
+ }
+ for len(st.str) > i+1 && st.str[i] == '.' && isDigit(st.str[i+1]) {
+ i += 2
+ for len(st.str) > i && isDigit(st.str[i]) {
+ i++
+ }
+ }
+ suffix := st.str[:i]
+ st.advance(i)
+ return &Clone{Base: a, Suffix: suffix}
+}
+
// substitutions is the list of substitution candidates that may
// appear later in the string. Candidates are looked up by index via
// the <seq-id> in a <substitution>.
type substitutions []AST

// add adds a new substitution candidate.
func (subs *substitutions) add(a AST) {
	*subs = append(*subs, a)
}
+
+// subAST maps standard substitution codes to the corresponding AST.
+var subAST = map[byte]AST{
+ 't': &Name{Name: "std"},
+ 'a': &Qualified{Scope: &Name{Name: "std"}, Name: &Name{Name: "allocator"}},
+ 'b': &Qualified{Scope: &Name{Name: "std"}, Name: &Name{Name: "basic_string"}},
+ 's': &Qualified{Scope: &Name{Name: "std"}, Name: &Name{Name: "string"}},
+ 'i': &Qualified{Scope: &Name{Name: "std"}, Name: &Name{Name: "istream"}},
+ 'o': &Qualified{Scope: &Name{Name: "std"}, Name: &Name{Name: "ostream"}},
+ 'd': &Qualified{Scope: &Name{Name: "std"}, Name: &Name{Name: "iostream"}},
+}
+
+// verboseAST maps standard substitution codes to the long form of the
+// corresponding AST. We use this when the Verbose option is used, to
+// match the standard demangler.
+var verboseAST = map[byte]AST{
+ 't': &Name{Name: "std"},
+ 'a': &Qualified{Scope: &Name{Name: "std"}, Name: &Name{Name: "allocator"}},
+ 'b': &Qualified{Scope: &Name{Name: "std"}, Name: &Name{Name: "basic_string"}},
+
+ // std::basic_string<char, std::char_traits<char>, std::allocator<char> >
+ 's': &Template{
+ Name: &Qualified{Scope: &Name{Name: "std"}, Name: &Name{Name: "basic_string"}},
+ Args: []AST{
+ &BuiltinType{Name: "char"},
+ &Template{
+ Name: &Qualified{Scope: &Name{Name: "std"}, Name: &Name{Name: "char_traits"}},
+ Args: []AST{&BuiltinType{Name: "char"}}},
+ &Template{
+ Name: &Qualified{Scope: &Name{Name: "std"}, Name: &Name{Name: "allocator"}},
+ Args: []AST{&BuiltinType{Name: "char"}}}}},
+ // std::basic_istream<char, std::char_traits<char> >
+ 'i': &Template{
+ Name: &Qualified{Scope: &Name{Name: "std"}, Name: &Name{Name: "basic_istream"}},
+ Args: []AST{
+ &BuiltinType{Name: "char"},
+ &Template{
+ Name: &Qualified{Scope: &Name{Name: "std"}, Name: &Name{Name: "char_traits"}},
+ Args: []AST{&BuiltinType{Name: "char"}}}}},
+ // std::basic_ostream<char, std::char_traits<char> >
+ 'o': &Template{
+ Name: &Qualified{Scope: &Name{Name: "std"}, Name: &Name{Name: "basic_ostream"}},
+ Args: []AST{
+ &BuiltinType{Name: "char"},
+ &Template{
+ Name: &Qualified{Scope: &Name{Name: "std"}, Name: &Name{Name: "char_traits"}},
+ Args: []AST{&BuiltinType{Name: "char"}}}}},
+ // std::basic_iostream<char, std::char_traits<char> >
+ 'd': &Template{
+ Name: &Qualified{Scope: &Name{Name: "std"}, Name: &Name{Name: "basic_iostream"}},
+ Args: []AST{
+ &BuiltinType{Name: "char"},
+ &Template{
+ Name: &Qualified{Scope: &Name{Name: "std"}, Name: &Name{Name: "char_traits"}},
+ Args: []AST{&BuiltinType{Name: "char"}}}}},
+}
+
// substitution parses:
//
//	<substitution> ::= S <seq-id> _
//	               ::= S_
//	               ::= St
//	               ::= Sa
//	               ::= Sb
//	               ::= Ss
//	               ::= Si
//	               ::= So
//	               ::= Sd
//
// forPrefix selects the constructor/destructor special case below;
// presumably it is true when the substitution starts the prefix of a
// name — TODO confirm at the call sites.
func (st *state) substitution(forPrefix bool) AST {
	st.checkChar('S')
	if len(st.str) == 0 {
		st.fail("missing substitution index")
	}
	c := st.str[0]
	off := st.off
	if c == '_' || isDigit(c) || isUpper(c) {
		// Numbered form: look up a previously recorded
		// candidate by sequence ID.
		id := st.seqID(false)
		if id >= len(st.subs) {
			st.failEarlier(fmt.Sprintf("substitution index out of range (%d >= %d)", id, len(st.subs)), st.off-off)
		}

		ret := st.subs[id]

		// We need to update any references to template
		// parameters to refer to the currently active
		// template.

		// When copying a Typed we may need to adjust
		// the templates.
		copyTemplates := st.templates
		var oldLambdaTemplateLevel []int

		// pushTemplate is called from skip, popTemplate from copy.
		pushTemplate := func(template *Template) {
			copyTemplates = append(copyTemplates, template)
			oldLambdaTemplateLevel = append(oldLambdaTemplateLevel, st.lambdaTemplateLevel)
			st.lambdaTemplateLevel = 0
		}
		popTemplate := func() {
			copyTemplates = copyTemplates[:len(copyTemplates)-1]
			st.lambdaTemplateLevel = oldLambdaTemplateLevel[len(oldLambdaTemplateLevel)-1]
			oldLambdaTemplateLevel = oldLambdaTemplateLevel[:len(oldLambdaTemplateLevel)-1]
		}

		// copy rewrites template-parameter references; Copy
		// calls it on each node after skip has descended into
		// the children (see AST.Copy).
		copy := func(a AST) AST {
			var index int
			switch a := a.(type) {
			case *Typed:
				// Remove the template added in skip.
				if _, ok := a.Name.(*Template); ok {
					popTemplate()
				}
				return nil
			case *Closure:
				// Undo the save in skip.
				st.lambdaTemplateLevel = oldLambdaTemplateLevel[len(oldLambdaTemplateLevel)-1]
				oldLambdaTemplateLevel = oldLambdaTemplateLevel[:len(oldLambdaTemplateLevel)-1]
				return nil
			case *TemplateParam:
				index = a.Index
			case *LambdaAuto:
				// A lambda auto parameter is represented
				// as a template parameter, so we may have
				// to change back when substituting.
				index = a.Index
			default:
				return nil
			}
			if st.lambdaTemplateLevel > 0 {
				if _, ok := a.(*LambdaAuto); ok {
					return nil
				}
				return &LambdaAuto{Index: index}
			}
			var template *Template
			if len(copyTemplates) > 0 {
				template = copyTemplates[len(copyTemplates)-1]
			} else if rt, ok := ret.(*Template); ok {
				// At least with clang we can see a template
				// to start, and sometimes we need to refer
				// to it. There is probably something wrong
				// here.
				template = rt
			} else {
				st.failEarlier("substituted template parameter not in scope of template", st.off-off)
			}
			if template == nil {
				// This template parameter is within
				// the scope of a cast operator.
				return &TemplateParam{Index: index, Template: nil}
			}

			if index >= len(template.Args) {
				st.failEarlier(fmt.Sprintf("substituted template index out of range (%d >= %d)", index, len(template.Args)), st.off-off)
			}

			return &TemplateParam{Index: index, Template: template}
		}
		var seen []AST
		// skip decides whether Copy should descend into a
		// node; it also maintains the template bookkeeping
		// that copy undoes.
		skip := func(a AST) bool {
			switch a := a.(type) {
			case *Typed:
				if template, ok := a.Name.(*Template); ok {
					// This template is removed in copy.
					pushTemplate(template)
				}
				return false
			case *Closure:
				// This is undone in copy.
				oldLambdaTemplateLevel = append(oldLambdaTemplateLevel, st.lambdaTemplateLevel)
				st.lambdaTemplateLevel = len(copyTemplates) + 1
				return false
			case *TemplateParam, *LambdaAuto:
				return false
			}
			// Avoid revisiting shared nodes.
			for _, v := range seen {
				if v == a {
					return true
				}
			}
			seen = append(seen, a)
			return false
		}

		if c := ret.Copy(copy, skip); c != nil {
			return c
		}

		return ret
	} else {
		// Single-letter standard substitution (St, Sa, ...).
		st.advance(1)
		m := subAST
		if st.verbose {
			m = verboseAST
		}
		// For compatibility with the standard demangler, use
		// a longer name for a constructor or destructor.
		if forPrefix && len(st.str) > 0 && (st.str[0] == 'C' || st.str[0] == 'D') {
			m = verboseAST
		}
		a, ok := m[c]
		if !ok {
			st.failEarlier("unrecognized substitution code", 1)
		}

		// An ABI tag on a standard substitution produces a new
		// substitution candidate.
		if len(st.str) > 0 && st.str[0] == 'B' {
			a = st.taggedName(a)
			st.subs.add(a)
		}

		return a
	}
}
+
// isDigit returns whether c is a digit for demangling purposes.
// (Fixes comment typo "whetner" and matches the style of isUpper and
// isLower below.)
func isDigit(c byte) bool {
	return c >= '0' && c <= '9'
}
+
// isUpper reports whether c is an upper case letter for demangling
// purposes.
func isUpper(c byte) bool {
	return 'A' <= c && c <= 'Z'
}
+
// isLower reports whether c is a lower case letter for demangling
// purposes.
func isLower(c byte) bool {
	return 'a' <= c && c <= 'z'
}
+
+// simplify replaces template parameters with their expansions, and
+// merges qualifiers.
+func simplify(a AST) AST {
+ var seen []AST
+ skip := func(a AST) bool {
+ for _, v := range seen {
+ if v == a {
+ return true
+ }
+ }
+ seen = append(seen, a)
+ return false
+ }
+ if r := a.Copy(simplifyOne, skip); r != nil {
+ return r
+ }
+ return a
+}
+
// simplifyOne simplifies a single AST. It returns nil if there is
// nothing to do.
func simplifyOne(a AST) AST {
	switch a := a.(type) {
	case *TemplateParam:
		// Replace a template parameter with the corresponding
		// template argument, when one is available.
		if a.Template != nil && a.Index < len(a.Template.Args) {
			return a.Template.Args[a.Index]
		}
	case *MethodWithQualifiers:
		// Collapse nested method qualifiers, merging the
		// qualifier lists and the ref-qualifiers.
		if m, ok := a.Method.(*MethodWithQualifiers); ok {
			ref := a.RefQualifier
			if ref == "" {
				ref = m.RefQualifier
			} else if m.RefQualifier != "" {
				// & wins over && when both appear.
				if ref == "&" || m.RefQualifier == "&" {
					ref = "&"
				}
			}
			return &MethodWithQualifiers{Method: m.Method, Qualifiers: mergeQualifiers(a.Qualifiers, m.Qualifiers), RefQualifier: ref}
		}
		if t, ok := a.Method.(*TypeWithQualifiers); ok {
			return &MethodWithQualifiers{Method: t.Base, Qualifiers: mergeQualifiers(a.Qualifiers, t.Qualifiers), RefQualifier: a.RefQualifier}
		}
	case *TypeWithQualifiers:
		// A qualified function type is really a qualified method.
		if ft, ok := a.Base.(*FunctionType); ok {
			return &MethodWithQualifiers{Method: ft, Qualifiers: a.Qualifiers, RefQualifier: ""}
		}
		// Merge nested qualifier wrappers.
		if t, ok := a.Base.(*TypeWithQualifiers); ok {
			return &TypeWithQualifiers{Base: t.Base, Qualifiers: mergeQualifiers(a.Qualifiers, t.Qualifiers)}
		}
		if m, ok := a.Base.(*MethodWithQualifiers); ok {
			return &MethodWithQualifiers{Method: m.Method, Qualifiers: mergeQualifiers(a.Qualifiers, m.Qualifiers), RefQualifier: m.RefQualifier}
		}
	case *ReferenceType:
		// Reference collapsing: & & -> &, & && -> &.
		if rt, ok := a.Base.(*ReferenceType); ok {
			return rt
		}
		if rrt, ok := a.Base.(*RvalueReferenceType); ok {
			return &ReferenceType{Base: rrt.Base}
		}
	case *RvalueReferenceType:
		// Reference collapsing: && && -> &&, && & -> &.
		if rrt, ok := a.Base.(*RvalueReferenceType); ok {
			return rrt
		}
		if rt, ok := a.Base.(*ReferenceType); ok {
			return rt
		}
	case *ArrayType:
		// Qualifiers on the element of an array type
		// go on the whole array type.
		if q, ok := a.Element.(*TypeWithQualifiers); ok {
			return &TypeWithQualifiers{
				Base:       &ArrayType{Dimension: a.Dimension, Element: q.Base},
				Qualifiers: q.Qualifiers,
			}
		}
	case *PackExpansion:
		// Expand the pack and replace it with a list of
		// expressions.
		if a.Pack != nil {
			exprs := make([]AST, len(a.Pack.Args))
			for i, arg := range a.Pack.Args {
				copy := func(sub AST) AST {
					// Replace the ArgumentPack
					// with a specific argument.
					if sub == a.Pack {
						return arg
					}
					// Copy everything else.
					return nil
				}

				var seen []AST
				skip := func(sub AST) bool {
					// Don't traverse into another
					// pack expansion.
					if _, ok := sub.(*PackExpansion); ok {
						return true
					}
					for _, v := range seen {
						if v == sub {
							return true
						}
					}
					seen = append(seen, sub)
					return false
				}

				b := a.Base.Copy(copy, skip)
				if b == nil {
					b = a.Base
				}
				// Recursively simplify each expanded element.
				exprs[i] = simplify(b)
			}
			return &ExprList{Exprs: exprs}
		}
	}
	return nil
}
+
// findArgumentPack walks the AST looking for the argument pack for a
// pack expansion. We find it via a template parameter. It returns nil
// if no argument pack is found.
func (st *state) findArgumentPack(a AST) *ArgumentPack {
	var seen []AST
	var ret *ArgumentPack
	a.Traverse(func(a AST) bool {
		// Once found, stop descending (returning false
		// prunes the traversal).
		if ret != nil {
			return false
		}
		switch a := a.(type) {
		case *TemplateParam:
			if a.Template == nil || a.Index >= len(a.Template.Args) {
				return true
			}
			if pack, ok := a.Template.Args[a.Index].(*ArgumentPack); ok {
				ret = pack
				return false
			}
		// Leaf-like node kinds that cannot contain the pack;
		// don't descend into them.
		case *PackExpansion, *Closure, *Name:
			return false
		case *TaggedName, *Operator, *BuiltinType, *FunctionParam:
			return false
		case *UnnamedType, *FixedType, *DefaultArg:
			return false
		}
		// Avoid revisiting shared nodes.
		for _, v := range seen {
			if v == a {
				return false
			}
		}
		seen = append(seen, a)
		return true
	})
	return ret
}
diff --git a/src/cmd/vendor/github.com/ianlancetaylor/demangle/rust.go b/src/cmd/vendor/github.com/ianlancetaylor/demangle/rust.go
new file mode 100644
index 0000000..f3d2d33
--- /dev/null
+++ b/src/cmd/vendor/github.com/ianlancetaylor/demangle/rust.go
@@ -0,0 +1,1165 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package demangle
+
+import (
+ "fmt"
+ "math"
+ "math/bits"
+ "strings"
+ "unicode/utf8"
+)
+
// rustToString demangles a Rust symbol. The name must start with the
// "_R" prefix of the Rust v0 mangling scheme, otherwise
// ErrNotMangledName is returned.
func rustToString(name string, options []Option) (ret string, err error) {
	if !strings.HasPrefix(name, "_R") {
		return "", ErrNotMangledName
	}

	// When the demangling routines encounter an error, they panic
	// with a value of type demangleErr.
	defer func() {
		if r := recover(); r != nil {
			if de, ok := r.(demangleErr); ok {
				ret = ""
				err = de
				return
			}
			panic(r)
		}
	}()

	// Anything from the first '.' onward is treated as a
	// vendor-specific suffix, not part of the mangled name.
	suffix := ""
	dot := strings.Index(name, ".")
	if dot >= 0 {
		suffix = name[dot:]
		name = name[:dot]
	}

	name = name[2:]
	rst := &rustState{orig: name, str: name}

	for _, o := range options {
		if o == NoTemplateParams {
			rst.noGenericArgs = true
		} else if isMaxLength(o) {
			rst.max = maxLength(o)
		}
	}

	rst.symbolName()

	if len(rst.str) > 0 {
		rst.fail("unparsed characters at end of mangled name")
	}

	// The suffix is only printed when the LLVMStyle option is in
	// effect, as " (suffix)".
	if suffix != "" {
		llvmStyle := false
		for _, o := range options {
			if o == LLVMStyle {
				llvmStyle = true
				break
			}
		}
		if llvmStyle {
			rst.skip = false
			rst.writeString(" (")
			rst.writeString(suffix)
			rst.writeByte(')')
		}
	}

	// Enforce the maximum output length, if one was requested.
	s := rst.buf.String()
	if rst.max > 0 && len(s) > rst.max {
		s = s[:rst.max]
	}
	return s, nil
}
+
+// A rustState holds the current state of demangling a Rust string.
+type rustState struct {
+ orig string // the original string being demangled
+ str string // remainder of string to demangle
+ off int // offset of str within original string
+ buf strings.Builder // demangled string being built
+ skip bool // don't print, just skip
+ lifetimes int64 // number of bound lifetimes
+ last byte // last byte written to buffer
+ noGenericArgs bool // don't demangle generic arguments
+ max int // maximum output length
+}
+
// fail panics with demangleErr, to be caught in rustToString. The
// current offset is recorded so the error can point at the offending
// character.
func (rst *rustState) fail(err string) {
	panic(demangleErr{err: err, off: rst.off})
}
+
// advance advances the current string offset by add bytes. Callers
// are expected to have checked that enough input remains; advancing
// past the end is an internal error.
func (rst *rustState) advance(add int) {
	if len(rst.str) < add {
		panic("internal error")
	}
	rst.str = rst.str[add:]
	rst.off += add
}
+
+// checkChar requires that the next character in the string be c,
+// and advances past it.
+func (rst *rustState) checkChar(c byte) {
+ if len(rst.str) == 0 || rst.str[0] != c {
+ rst.fail("expected " + string(c))
+ }
+ rst.advance(1)
+}
+
+// writeByte writes a byte to the buffer.
+func (rst *rustState) writeByte(c byte) {
+ if rst.skip {
+ return
+ }
+ if rst.max > 0 && rst.buf.Len() > rst.max {
+ rst.skip = true
+ return
+ }
+ rst.last = c
+ rst.buf.WriteByte(c)
+}
+
+// writeString writes a string to the buffer.
+func (rst *rustState) writeString(s string) {
+ if rst.skip {
+ return
+ }
+ if rst.max > 0 && rst.buf.Len() > rst.max {
+ rst.skip = true
+ return
+ }
+ if len(s) > 0 {
+ rst.last = s[len(s)-1]
+ rst.buf.WriteString(s)
+ }
+}
+
// symbolName parses:
//
//	<symbol-name> = "_R" [<decimal-number>] <path> [<instantiating-crate>]
//	<instantiating-crate> = <path>
//
// We've already skipped the "_R".
func (rst *rustState) symbolName() {
	if len(rst.str) < 1 {
		rst.fail("expected symbol-name")
	}

	// A leading digit would be an encoding version number; only
	// version 0 (no digit) is supported.
	if isDigit(rst.str[0]) {
		rst.fail("unsupported Rust encoding version")
	}

	rst.path(true)

	// The optional instantiating crate is parsed but not printed.
	if len(rst.str) > 0 {
		rst.skip = true
		rst.path(false)
	}
}
+
+// path parses:
+//
+// <path> = "C" <identifier> // crate root
+// | "M" <impl-path> <type> // <T> (inherent impl)
+// | "X" <impl-path> <type> <path> // <T as Trait> (trait impl)
+// | "Y" <type> <path> // <T as Trait> (trait definition)
+// | "N" <namespace> <path> <identifier> // ...::ident (nested path)
+// | "I" <path> {<generic-arg>} "E" // ...<T, U> (generic args)
+// | <backref>
+// <namespace> = "C" // closure
+// | "S" // shim
+// | <A-Z> // other special namespaces
+// | <a-z> // internal namespaces
+//
+// needsSeparator is true if we need to write out :: for a generic;
+// it is passed as false if we are in the middle of a type.
+func (rst *rustState) path(needsSeparator bool) {
+ if len(rst.str) < 1 {
+ rst.fail("expected path")
+ }
+ switch c := rst.str[0]; c {
+ case 'C':
+ rst.advance(1)
+ _, ident := rst.identifier()
+ rst.writeString(ident)
+ case 'M', 'X':
+ rst.advance(1)
+ rst.implPath()
+ rst.writeByte('<')
+ rst.demangleType()
+ if c == 'X' {
+ rst.writeString(" as ")
+ rst.path(false)
+ }
+ rst.writeByte('>')
+ case 'Y':
+ rst.advance(1)
+ rst.writeByte('<')
+ rst.demangleType()
+ rst.writeString(" as ")
+ rst.path(false)
+ rst.writeByte('>')
+ case 'N':
+ rst.advance(1)
+
+ if len(rst.str) < 1 {
+ rst.fail("expected namespace")
+ }
+ ns := rst.str[0]
+ switch {
+ case ns >= 'a' && ns <= 'z':
+ case ns >= 'A' && ns <= 'Z':
+ default:
+ rst.fail("invalid namespace character")
+ }
+ rst.advance(1)
+
+ rst.path(needsSeparator)
+
+ dis, ident := rst.identifier()
+
+ if ns >= 'A' && ns <= 'Z' {
+ rst.writeString("::{")
+ switch ns {
+ case 'C':
+ rst.writeString("closure")
+ case 'S':
+ rst.writeString("shim")
+ default:
+ rst.writeByte(ns)
+ }
+ if len(ident) > 0 {
+ rst.writeByte(':')
+ rst.writeString(ident)
+ }
+ if !rst.skip {
+ fmt.Fprintf(&rst.buf, "#%d}", dis)
+ rst.last = '}'
+ }
+ } else {
+ rst.writeString("::")
+ rst.writeString(ident)
+ }
+ case 'I':
+ rst.advance(1)
+ rst.path(needsSeparator)
+ if needsSeparator {
+ rst.writeString("::")
+ }
+ rst.writeByte('<')
+ rst.genericArgs()
+ rst.writeByte('>')
+ rst.checkChar('E')
+ case 'B':
+ rst.backref(func() { rst.path(needsSeparator) })
+ default:
+ rst.fail("unrecognized letter in path")
+ }
+}
+
+// implPath parses:
+//
+// <impl-path> = [<disambiguator>] <path>
+func (rst *rustState) implPath() {
+ // This path is not part of the demangled string.
+ hold := rst.skip
+ rst.skip = true
+ defer func() {
+ rst.skip = hold
+ }()
+
+ rst.disambiguator()
+ rst.path(false)
+}
+
+// identifier parses:
+//
+// <identifier> = [<disambiguator>] <undisambiguated-identifier>
+//
+// It returns the disambiguator and the identifier.
+func (rst *rustState) identifier() (int64, string) {
+ dis := rst.disambiguator()
+ ident, _ := rst.undisambiguatedIdentifier()
+ return dis, ident
+}
+
+// disambiguator parses an optional:
+//
+// <disambiguator> = "s" <base-62-number>
+func (rst *rustState) disambiguator() int64 {
+ if len(rst.str) == 0 || rst.str[0] != 's' {
+ return 0
+ }
+ rst.advance(1)
+ return rst.base62Number() + 1
+}
+
+// undisambiguatedIdentifier parses:
+//
+// <undisambiguated-identifier> = ["u"] <decimal-number> ["_"] <bytes>
+func (rst *rustState) undisambiguatedIdentifier() (id string, isPunycode bool) {
+ isPunycode = false
+ if len(rst.str) > 0 && rst.str[0] == 'u' {
+ rst.advance(1)
+ isPunycode = true
+ }
+
+ val := rst.decimalNumber()
+
+ if len(rst.str) > 0 && rst.str[0] == '_' {
+ rst.advance(1)
+ }
+
+ if len(rst.str) < val {
+ rst.fail("not enough characters for identifier")
+ }
+ id = rst.str[:val]
+ rst.advance(val)
+
+ for i := 0; i < len(id); i++ {
+ c := id[i]
+ switch {
+ case c >= '0' && c <= '9':
+ case c >= 'A' && c <= 'Z':
+ case c >= 'a' && c <= 'z':
+ case c == '_':
+ default:
+ rst.fail("invalid character in identifier")
+ }
+ }
+
+ if isPunycode {
+ id = rst.expandPunycode(id)
+ }
+
+ return id, isPunycode
+}
+
// expandPunycode decodes the Rust version of punycode.
// This algorithm is taken from RFC 3492 section 6.2.
func (rst *rustState) expandPunycode(s string) string {
	const (
		base        = 36
		tmin        = 1
		tmax        = 26
		skew        = 38
		damp        = 700
		initialBias = 72
		initialN    = 128
	)

	// The text before the last '_' is literal (already-ASCII)
	// output; the text after it encodes the non-ASCII code points.
	var (
		output   []rune
		encoding string
	)
	idx := strings.LastIndex(s, "_")
	if idx >= 0 {
		output = []rune(s[:idx])
		encoding = s[idx+1:]
	} else {
		encoding = s
	}

	i := 0
	n := initialN
	bias := initialBias

	pos := 0
	for pos < len(encoding) {
		oldI := i
		w := 1
		// Decode one generalized variable-length integer
		// (a delta) from the base-36 digits.
		for k := base; ; k += base {
			if pos == len(encoding) {
				rst.fail("unterminated punycode")
			}

			var digit byte
			d := encoding[pos]
			pos++
			switch {
			case '0' <= d && d <= '9':
				digit = d - '0' + 26
			case 'A' <= d && d <= 'Z':
				digit = d - 'A'
			case 'a' <= d && d <= 'z':
				digit = d - 'a'
			default:
				rst.fail("invalid punycode digit")
			}

			i += int(digit) * w
			if i < 0 {
				rst.fail("punycode number overflow")
			}

			// Threshold t for this digit position.
			var t int
			if k <= bias {
				t = tmin
			} else if k > bias+tmax {
				t = tmax
			} else {
				t = k - bias
			}

			if int(digit) < t {
				break
			}

			if w >= math.MaxInt32/base {
				rst.fail("punycode number overflow")
			}
			w *= base - t
		}

		// Bias adaptation (RFC 3492 section 6.1).
		delta := i - oldI
		numPoints := len(output) + 1
		firstTime := oldI == 0
		if firstTime {
			delta /= damp
		} else {
			delta /= 2
		}
		delta += delta / numPoints
		k := 0
		for delta > ((base-tmin)*tmax)/2 {
			delta /= base - tmin
			k += base
		}
		bias = k + ((base-tmin+1)*delta)/(delta+skew)

		// The decoded delta selects both the next code point
		// value n and the insertion position i.
		n += i / (len(output) + 1)
		if n > utf8.MaxRune {
			rst.fail("punycode rune overflow")
		} else if !utf8.ValidRune(rune(n)) {
			rst.fail("punycode invalid code point")
		}
		i %= len(output) + 1
		// Insert rune n at position i.
		output = append(output, 0)
		copy(output[i+1:], output[i:])
		output[i] = rune(n)
		i++
	}

	return string(output)
}
+
+// genericArgs prints a list of generic arguments, without angle brackets.
+func (rst *rustState) genericArgs() {
+ if rst.noGenericArgs {
+ hold := rst.skip
+ rst.skip = true
+ defer func() {
+ rst.skip = hold
+ }()
+ }
+
+ first := true
+ for len(rst.str) > 0 && rst.str[0] != 'E' {
+ if first {
+ first = false
+ } else {
+ rst.writeString(", ")
+ }
+ rst.genericArg()
+ }
+}
+
+// genericArg parses:
+//
+// <generic-arg> = <lifetime>
+// | <type>
+// | "K" <const> // forward-compat for const generics
+// <lifetime> = "L" <base-62-number>
+func (rst *rustState) genericArg() {
+ if len(rst.str) < 1 {
+ rst.fail("expected generic-arg")
+ }
+ if rst.str[0] == 'L' {
+ rst.advance(1)
+ rst.writeLifetime(rst.base62Number())
+ } else if rst.str[0] == 'K' {
+ rst.advance(1)
+ rst.demangleConst()
+ } else {
+ rst.demangleType()
+ }
+}
+
// binder parses an optional:
//
//	<binder> = "G" <base-62-number>
//
// If present it prints a "for<'a, 'b, ...> " clause introducing the
// bound lifetimes.
func (rst *rustState) binder() {
	if len(rst.str) < 1 || rst.str[0] != 'G' {
		return
	}
	rst.advance(1)

	binderLifetimes := rst.base62Number() + 1

	// Every bound lifetime should be referenced later.
	if binderLifetimes >= int64(len(rst.str))-rst.lifetimes {
		rst.fail("binder lifetimes overflow")
	}

	rst.writeString("for<")
	for i := int64(0); i < binderLifetimes; i++ {
		if i > 0 {
			rst.writeString(", ")
		}
		// Each new binding increments the depth; 1 always
		// refers to the most recently bound lifetime.
		rst.lifetimes++
		rst.writeLifetime(1)
	}
	rst.writeString("> ")
}
+
+// demangleType parses:
+//
+// <type> = <basic-type>
+// | <path> // named type
+// | "A" <type> <const> // [T; N]
+// | "S" <type> // [T]
+// | "T" {<type>} "E" // (T1, T2, T3, ...)
+// | "R" [<lifetime>] <type> // &T
+// | "Q" [<lifetime>] <type> // &mut T
+// | "P" <type> // *const T
+// | "O" <type> // *mut T
+// | "F" <fn-sig> // fn(...) -> ...
+// | "D" <dyn-bounds> <lifetime> // dyn Trait<Assoc = X> + Send + 'a
+// | <backref>
+func (rst *rustState) demangleType() {
+ if len(rst.str) < 1 {
+ rst.fail("expected type")
+ }
+ c := rst.str[0]
+ if c >= 'a' && c <= 'z' {
+ rst.basicType()
+ return
+ }
+ switch c {
+ case 'C', 'M', 'X', 'Y', 'N', 'I':
+ rst.path(false)
+ case 'A', 'S':
+ rst.advance(1)
+ rst.writeByte('[')
+ rst.demangleType()
+ if c == 'A' {
+ rst.writeString("; ")
+ rst.demangleConst()
+ }
+ rst.writeByte(']')
+ case 'T':
+ rst.advance(1)
+ rst.writeByte('(')
+ c := 0
+ for len(rst.str) > 0 && rst.str[0] != 'E' {
+ if c > 0 {
+ rst.writeString(", ")
+ }
+ c++
+ rst.demangleType()
+ }
+ if c == 1 {
+ rst.writeByte(',')
+ }
+ rst.writeByte(')')
+ rst.checkChar('E')
+ case 'R', 'Q':
+ rst.advance(1)
+ rst.writeByte('&')
+ if len(rst.str) > 0 && rst.str[0] == 'L' {
+ rst.advance(1)
+ if lifetime := rst.base62Number(); lifetime > 0 {
+ rst.writeLifetime(lifetime)
+ rst.writeByte(' ')
+ }
+ }
+ if c == 'Q' {
+ rst.writeString("mut ")
+ }
+ rst.demangleType()
+ case 'P':
+ rst.advance(1)
+ rst.writeString("*const ")
+ rst.demangleType()
+ case 'O':
+ rst.advance(1)
+ rst.writeString("*mut ")
+ rst.demangleType()
+ case 'F':
+ rst.advance(1)
+ hold := rst.lifetimes
+ rst.fnSig()
+ rst.lifetimes = hold
+ case 'D':
+ rst.advance(1)
+ hold := rst.lifetimes
+ rst.dynBounds()
+ rst.lifetimes = hold
+ if len(rst.str) == 0 || rst.str[0] != 'L' {
+ rst.fail("expected L")
+ }
+ rst.advance(1)
+ if lifetime := rst.base62Number(); lifetime > 0 {
+ if rst.last != ' ' {
+ rst.writeByte(' ')
+ }
+ rst.writeString("+ ")
+ rst.writeLifetime(lifetime)
+ }
+ case 'B':
+ rst.backref(rst.demangleType)
+ default:
+ rst.fail("unrecognized character in type")
+ }
+}
+
+var rustBasicTypes = map[byte]string{
+ 'a': "i8",
+ 'b': "bool",
+ 'c': "char",
+ 'd': "f64",
+ 'e': "str",
+ 'f': "f32",
+ 'h': "u8",
+ 'i': "isize",
+ 'j': "usize",
+ 'l': "i32",
+ 'm': "u32",
+ 'n': "i128",
+ 'o': "u128",
+ 'p': "_",
+ 's': "i16",
+ 't': "u16",
+ 'u': "()",
+ 'v': "...",
+ 'x': "i64",
+ 'y': "u64",
+ 'z': "!",
+}
+
+// basicType parses:
+//
+// <basic-type>
+func (rst *rustState) basicType() {
+ if len(rst.str) < 1 {
+ rst.fail("expected basic type")
+ }
+ str, ok := rustBasicTypes[rst.str[0]]
+ if !ok {
+ rst.fail("unrecognized basic type character")
+ }
+ rst.advance(1)
+ rst.writeString(str)
+}
+
+// fnSig parses:
+//
+// <fn-sig> = [<binder>] ["U"] ["K" <abi>] {<type>} "E" <type>
+// <abi> = "C"
+// | <undisambiguated-identifier>
+func (rst *rustState) fnSig() {
+ rst.binder()
+ if len(rst.str) > 0 && rst.str[0] == 'U' {
+ rst.advance(1)
+ rst.writeString("unsafe ")
+ }
+ if len(rst.str) > 0 && rst.str[0] == 'K' {
+ rst.advance(1)
+ if len(rst.str) > 0 && rst.str[0] == 'C' {
+ rst.advance(1)
+ rst.writeString(`extern "C" `)
+ } else {
+ rst.writeString(`extern "`)
+ id, isPunycode := rst.undisambiguatedIdentifier()
+ if isPunycode {
+ rst.fail("punycode used in ABI string")
+ }
+ id = strings.ReplaceAll(id, "_", "-")
+ rst.writeString(id)
+ rst.writeString(`" `)
+ }
+ }
+ rst.writeString("fn(")
+ first := true
+ for len(rst.str) > 0 && rst.str[0] != 'E' {
+ if first {
+ first = false
+ } else {
+ rst.writeString(", ")
+ }
+ rst.demangleType()
+ }
+ rst.checkChar('E')
+ rst.writeByte(')')
+ if len(rst.str) > 0 && rst.str[0] == 'u' {
+ rst.advance(1)
+ } else {
+ rst.writeString(" -> ")
+ rst.demangleType()
+ }
+}
+
+// dynBounds parses:
+//
+// <dyn-bounds> = [<binder>] {<dyn-trait>} "E"
+func (rst *rustState) dynBounds() {
+ rst.writeString("dyn ")
+ rst.binder()
+ first := true
+ for len(rst.str) > 0 && rst.str[0] != 'E' {
+ if first {
+ first = false
+ } else {
+ rst.writeString(" + ")
+ }
+ rst.dynTrait()
+ }
+ rst.checkChar('E')
+}
+
+// dynTrait parses:
+//
+// <dyn-trait> = <path> {<dyn-trait-assoc-binding>}
+// <dyn-trait-assoc-binding> = "p" <undisambiguated-identifier> <type>
+func (rst *rustState) dynTrait() {
+ started := rst.pathStartGenerics()
+ for len(rst.str) > 0 && rst.str[0] == 'p' {
+ rst.advance(1)
+ if started {
+ rst.writeString(", ")
+ } else {
+ rst.writeByte('<')
+ started = true
+ }
+ id, _ := rst.undisambiguatedIdentifier()
+ rst.writeString(id)
+ rst.writeString(" = ")
+ rst.demangleType()
+ }
+ if started {
+ rst.writeByte('>')
+ }
+}
+
+// pathStartGenerics is like path but if it sees an I to start generic
+// arguments it won't close them. It reports whether it started generics.
+func (rst *rustState) pathStartGenerics() bool {
+ if len(rst.str) < 1 {
+ rst.fail("expected path")
+ }
+ switch rst.str[0] {
+ case 'I':
+ rst.advance(1)
+ rst.path(false)
+ rst.writeByte('<')
+ rst.genericArgs()
+ rst.checkChar('E')
+ return true
+ case 'B':
+ var started bool
+ rst.backref(func() { started = rst.pathStartGenerics() })
+ return started
+ default:
+ rst.path(false)
+ return false
+ }
+}
+
// writeLifetime writes out a lifetime binding. A lifetime value of 0
// prints as the erased lifetime '_; otherwise the value is a De
// Bruijn-style index relative to rst.lifetimes, printed as 'a, 'b,
// ..., 'z, 'z1, 'z2, ...
func (rst *rustState) writeLifetime(lifetime int64) {
	rst.writeByte('\'')
	if lifetime == 0 {
		rst.writeByte('_')
		return
	}
	depth := rst.lifetimes - lifetime
	if depth < 0 {
		rst.fail("invalid lifetime")
	} else if depth < 26 {
		rst.writeByte('a' + byte(depth))
	} else {
		// Past 'z', fall back to a numbered form.
		rst.writeByte('z')
		if !rst.skip {
			fmt.Fprintf(&rst.buf, "%d", depth-26+1)
			rst.last = '0'
		}
	}
}
+
// demangleConst parses:
//
//	<const> = <type> <const-data>
//	        | "p" // placeholder, shown as _
//	        | <backref>
//	<const-data> = ["n"] {<hex-digit>} "_"
func (rst *rustState) demangleConst() {
	if len(rst.str) < 1 {
		rst.fail("expected constant")
	}

	if rst.str[0] == 'B' {
		rst.backref(rst.demangleConst)
		return
	}

	if rst.str[0] == 'p' {
		rst.advance(1)
		rst.writeByte('_')
		return
	}

	typ := rst.str[0]

	// How to render the constant depends on the kind of its type.
	const (
		invalid = iota
		signedInt
		unsignedInt
		boolean
		character
	)

	var kind int
	switch typ {
	case 'a', 's', 'l', 'x', 'n', 'i':
		kind = signedInt
	case 'h', 't', 'm', 'y', 'o', 'j':
		kind = unsignedInt
	case 'b':
		kind = boolean
	case 'c':
		kind = character
	default:
		rst.fail("unrecognized constant type")
	}

	rst.advance(1)

	// A leading 'n' marks a negative signed value.
	if kind == signedInt && len(rst.str) > 0 && rst.str[0] == 'n' {
		rst.advance(1)
		rst.writeByte('-')
	}

	// Accumulate the hex digits into val; start retains a view of
	// the digits in case the value is too large for a uint64.
	start := rst.str
	digits := 0
	val := uint64(0)
digitLoop:
	for len(rst.str) > 0 {
		c := rst.str[0]
		var digit uint64
		switch {
		case c >= '0' && c <= '9':
			digit = uint64(c - '0')
		case c >= 'a' && c <= 'f':
			digit = uint64(c - 'a' + 10)
		case c == '_':
			rst.advance(1)
			break digitLoop
		default:
			rst.fail("expected hex digit or _")
		}
		rst.advance(1)
		// Only a lone 0 may start with the digit 0.
		if val == 0 && digit == 0 && (len(rst.str) == 0 || rst.str[0] != '_') {
			rst.fail("invalid leading 0 in constant")
		}
		val *= 16
		val += digit
		digits++
	}

	if digits == 0 {
		rst.fail("expected constant")
	}

	switch kind {
	case signedInt, unsignedInt:
		if digits > 16 {
			// Value too big, just write out the string.
			rst.writeString("0x")
			rst.writeString(start[:digits])
		} else {
			if !rst.skip {
				fmt.Fprintf(&rst.buf, "%d", val)
				rst.last = '0'
			}
		}
	case boolean:
		if digits > 1 {
			rst.fail("boolean value too large")
		} else if val == 0 {
			rst.writeString("false")
		} else if val == 1 {
			rst.writeString("true")
		} else {
			rst.fail("invalid boolean value")
		}
	case character:
		if digits > 6 {
			rst.fail("character value too large")
		}
		// Print as a quoted character literal, escaping as
		// Rust would.
		rst.writeByte('\'')
		if val == '\t' {
			rst.writeString(`\t`)
		} else if val == '\r' {
			rst.writeString(`\r`)
		} else if val == '\n' {
			rst.writeString(`\n`)
		} else if val == '\\' {
			rst.writeString(`\\`)
		} else if val == '\'' {
			rst.writeString(`\'`)
		} else if val >= ' ' && val <= '~' {
			// printable ASCII character
			rst.writeByte(byte(val))
		} else {
			// Non-printable: use a \u{...} escape.
			if !rst.skip {
				fmt.Fprintf(&rst.buf, `\u{%x}`, val)
				rst.last = '}'
			}
		}
		rst.writeByte('\'')
	default:
		panic("internal error")
	}
}
+
+// base62Number parses:
+//
+// <base-62-number> = {<0-9a-zA-Z>} "_"
+func (rst *rustState) base62Number() int64 {
+ if len(rst.str) > 0 && rst.str[0] == '_' {
+ rst.advance(1)
+ return 0
+ }
+ val := int64(0)
+ for len(rst.str) > 0 {
+ c := rst.str[0]
+ rst.advance(1)
+ if c == '_' {
+ return val + 1
+ }
+ val *= 62
+ if c >= '0' && c <= '9' {
+ val += int64(c - '0')
+ } else if c >= 'a' && c <= 'z' {
+ val += int64(c - 'a' + 10)
+ } else if c >= 'A' && c <= 'Z' {
+ val += int64(c - 'A' + 36)
+ } else {
+ rst.fail("invalid digit in base 62 number")
+ }
+ }
+ rst.fail("expected _ after base 62 number")
+ return 0
+}
+
+// backref parses:
+//
+// <backref> = "B" <base-62-number>
+func (rst *rustState) backref(demangle func()) {
+ backoff := rst.off
+
+ rst.checkChar('B')
+ idx64 := rst.base62Number()
+
+ if rst.skip {
+ return
+ }
+ if rst.max > 0 && rst.buf.Len() > rst.max {
+ return
+ }
+
+ idx := int(idx64)
+ if int64(idx) != idx64 {
+ rst.fail("backref index overflow")
+ }
+ if idx < 0 || idx >= backoff {
+ rst.fail("invalid backref index")
+ }
+
+ holdStr := rst.str
+ holdOff := rst.off
+ rst.str = rst.orig[idx:backoff]
+ rst.off = idx
+ defer func() {
+ rst.str = holdStr
+ rst.off = holdOff
+ }()
+
+ demangle()
+}
+
+func (rst *rustState) decimalNumber() int {
+ if len(rst.str) == 0 {
+ rst.fail("expected number")
+ }
+
+ val := 0
+ for len(rst.str) > 0 && isDigit(rst.str[0]) {
+ add := int(rst.str[0] - '0')
+ if val >= math.MaxInt32/10-add {
+ rst.fail("decimal number overflow")
+ }
+ val *= 10
+ val += add
+ rst.advance(1)
+ }
+ return val
+}
+
+// oldRustToString demangles a Rust symbol using the old demangling.
+// The second result reports whether this is a valid Rust mangled name.
+func oldRustToString(name string, options []Option) (string, bool) {
+ max := 0
+ for _, o := range options {
+ if isMaxLength(o) {
+ max = maxLength(o)
+ }
+ }
+
+ // We know that the string starts with _ZN.
+ name = name[3:]
+
+ hexDigit := func(c byte) (byte, bool) {
+ switch {
+ case c >= '0' && c <= '9':
+ return c - '0', true
+ case c >= 'a' && c <= 'f':
+ return c - 'a' + 10, true
+ default:
+ return 0, false
+ }
+ }
+
+ // We know that the strings end with "17h" followed by 16 characters
+ // followed by "E". We check that the 16 characters are all hex digits.
+ // Also the hex digits must contain at least 5 distinct digits.
+ seen := uint16(0)
+ for i := len(name) - 17; i < len(name)-1; i++ {
+ digit, ok := hexDigit(name[i])
+ if !ok {
+ return "", false
+ }
+ seen |= 1 << digit
+ }
+ if bits.OnesCount16(seen) < 5 {
+ return "", false
+ }
+ name = name[:len(name)-20]
+
+ // The name is a sequence of length-preceded identifiers.
+ var sb strings.Builder
+ for len(name) > 0 {
+ if max > 0 && sb.Len() > max {
+ break
+ }
+
+ if !isDigit(name[0]) {
+ return "", false
+ }
+
+ val := 0
+ for len(name) > 0 && isDigit(name[0]) {
+ add := int(name[0] - '0')
+ if val >= math.MaxInt32/10-add {
+ return "", false
+ }
+ val *= 10
+ val += add
+ name = name[1:]
+ }
+
+ // An optional trailing underscore can separate the
+ // length from the identifier.
+ if len(name) > 0 && name[0] == '_' {
+ name = name[1:]
+ val--
+ }
+
+ if len(name) < val {
+ return "", false
+ }
+
+ id := name[:val]
+ name = name[val:]
+
+ if sb.Len() > 0 {
+ sb.WriteString("::")
+ }
+
+ // Ignore leading underscores preceding escape sequences.
+ if strings.HasPrefix(id, "_$") {
+ id = id[1:]
+ }
+
+ // The identifier can have escape sequences.
+ escape:
+ for len(id) > 0 {
+ switch c := id[0]; c {
+ case '$':
+ codes := map[string]byte{
+ "SP": '@',
+ "BP": '*',
+ "RF": '&',
+ "LT": '<',
+ "GT": '>',
+ "LP": '(',
+ "RP": ')',
+ }
+
+ valid := true
+ if len(id) > 2 && id[1] == 'C' && id[2] == '$' {
+ sb.WriteByte(',')
+ id = id[3:]
+ } else if len(id) > 4 && id[1] == 'u' && id[4] == '$' {
+ dig1, ok1 := hexDigit(id[2])
+ dig2, ok2 := hexDigit(id[3])
+ val := (dig1 << 4) | dig2
+ if !ok1 || !ok2 || dig1 > 7 || val < ' ' {
+ valid = false
+ } else {
+ sb.WriteByte(val)
+ id = id[5:]
+ }
+ } else if len(id) > 3 && id[3] == '$' {
+ if code, ok := codes[id[1:3]]; !ok {
+ valid = false
+ } else {
+ sb.WriteByte(code)
+ id = id[4:]
+ }
+ } else {
+ valid = false
+ }
+ if !valid {
+ sb.WriteString(id)
+ break escape
+ }
+ case '.':
+ if strings.HasPrefix(id, "..") {
+ sb.WriteString("::")
+ id = id[2:]
+ } else {
+ sb.WriteByte(c)
+ id = id[1:]
+ }
+ default:
+ sb.WriteByte(c)
+ id = id[1:]
+ }
+ }
+ }
+
+ s := sb.String()
+ if max > 0 && len(s) > max {
+ s = s[:max]
+ }
+ return s, true
+}