summaryrefslogtreecommitdiffstats
path: root/src/cmd/vendor/github.com
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-28 13:14:23 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-28 13:14:23 +0000
commit73df946d56c74384511a194dd01dbe099584fd1a (patch)
treefd0bcea490dd81327ddfbb31e215439672c9a068 /src/cmd/vendor/github.com
parentInitial commit. (diff)
downloadgolang-1.16-73df946d56c74384511a194dd01dbe099584fd1a.tar.xz
golang-1.16-73df946d56c74384511a194dd01dbe099584fd1a.zip
Adding upstream version 1.16.10.upstream/1.16.10upstream
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'src/cmd/vendor/github.com')
-rw-r--r--src/cmd/vendor/github.com/google/pprof/AUTHORS7
-rw-r--r--src/cmd/vendor/github.com/google/pprof/CONTRIBUTORS16
-rw-r--r--src/cmd/vendor/github.com/google/pprof/LICENSE202
-rw-r--r--src/cmd/vendor/github.com/google/pprof/driver/driver.go296
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/binutils/addr2liner.go242
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/binutils/addr2liner_llvm.go175
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/binutils/addr2liner_nm.go124
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/binutils/binutils.go568
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/binutils/disasm.go177
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/driver/cli.go367
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/driver/commands.go451
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/driver/config.go367
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/driver/driver.go340
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/driver/driver_focus.go219
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/driver/fetch.go587
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/driver/flags.go71
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/driver/flamegraph.go106
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/driver/interactive.go418
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/driver/options.go100
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/driver/settings.go157
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/driver/svg.go80
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/driver/tempfile.go60
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/driver/webhtml.go1403
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/driver/webui.go460
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/elfexec/elfexec.go285
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/graph/dotgraph.go491
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/graph/graph.go1170
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/measurement/measurement.go328
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/plugin/plugin.go213
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/report/report.go1313
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/report/source.go653
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/report/source_html.go84
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/symbolizer/symbolizer.go361
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/symbolz/symbolz.go200
-rw-r--r--src/cmd/vendor/github.com/google/pprof/internal/transport/transport.go131
-rw-r--r--src/cmd/vendor/github.com/google/pprof/profile/encode.go567
-rw-r--r--src/cmd/vendor/github.com/google/pprof/profile/filter.go270
-rw-r--r--src/cmd/vendor/github.com/google/pprof/profile/index.go64
-rw-r--r--src/cmd/vendor/github.com/google/pprof/profile/legacy_java_profile.go315
-rw-r--r--src/cmd/vendor/github.com/google/pprof/profile/legacy_profile.go1225
-rw-r--r--src/cmd/vendor/github.com/google/pprof/profile/merge.go479
-rw-r--r--src/cmd/vendor/github.com/google/pprof/profile/profile.go793
-rw-r--r--src/cmd/vendor/github.com/google/pprof/profile/proto.go370
-rw-r--r--src/cmd/vendor/github.com/google/pprof/profile/prune.go178
-rw-r--r--src/cmd/vendor/github.com/google/pprof/third_party/d3/LICENSE27
-rw-r--r--src/cmd/vendor/github.com/google/pprof/third_party/d3/README.md119
-rw-r--r--src/cmd/vendor/github.com/google/pprof/third_party/d3/d3.go4675
-rw-r--r--src/cmd/vendor/github.com/google/pprof/third_party/d3flamegraph/LICENSE201
-rw-r--r--src/cmd/vendor/github.com/google/pprof/third_party/d3flamegraph/d3_flame_graph.go1009
-rw-r--r--src/cmd/vendor/github.com/google/pprof/third_party/svgpan/LICENSE27
-rw-r--r--src/cmd/vendor/github.com/google/pprof/third_party/svgpan/svgpan.go297
-rw-r--r--src/cmd/vendor/github.com/ianlancetaylor/demangle/.gitignore13
-rw-r--r--src/cmd/vendor/github.com/ianlancetaylor/demangle/LICENSE27
-rw-r--r--src/cmd/vendor/github.com/ianlancetaylor/demangle/README.md3
-rw-r--r--src/cmd/vendor/github.com/ianlancetaylor/demangle/ast.go3205
-rw-r--r--src/cmd/vendor/github.com/ianlancetaylor/demangle/demangle.go2837
56 files changed, 28923 insertions, 0 deletions
diff --git a/src/cmd/vendor/github.com/google/pprof/AUTHORS b/src/cmd/vendor/github.com/google/pprof/AUTHORS
new file mode 100644
index 0000000..fd736cb
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/AUTHORS
@@ -0,0 +1,7 @@
+# This is the official list of pprof authors for copyright purposes.
+# This file is distinct from the CONTRIBUTORS files.
+# See the latter for an explanation.
+# Names should be added to this file as:
+# Name or Organization <email address>
+# The email address is not required for organizations.
+Google Inc. \ No newline at end of file
diff --git a/src/cmd/vendor/github.com/google/pprof/CONTRIBUTORS b/src/cmd/vendor/github.com/google/pprof/CONTRIBUTORS
new file mode 100644
index 0000000..8c8c37d
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/CONTRIBUTORS
@@ -0,0 +1,16 @@
+# People who have agreed to one of the CLAs and can contribute patches.
+# The AUTHORS file lists the copyright holders; this file
+# lists people. For example, Google employees are listed here
+# but not in AUTHORS, because Google holds the copyright.
+#
+# https://developers.google.com/open-source/cla/individual
+# https://developers.google.com/open-source/cla/corporate
+#
+# Names should be added to this file as:
+# Name <email address>
+Raul Silvera <rsilvera@google.com>
+Tipp Moseley <tipp@google.com>
+Hyoun Kyu Cho <netforce@google.com>
+Martin Spier <spiermar@gmail.com>
+Taco de Wolff <tacodewolff@gmail.com>
+Andrew Hunter <andrewhhunter@gmail.com>
diff --git a/src/cmd/vendor/github.com/google/pprof/LICENSE b/src/cmd/vendor/github.com/google/pprof/LICENSE
new file mode 100644
index 0000000..d645695
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/LICENSE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/src/cmd/vendor/github.com/google/pprof/driver/driver.go b/src/cmd/vendor/github.com/google/pprof/driver/driver.go
new file mode 100644
index 0000000..e65bc2f
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/driver/driver.go
@@ -0,0 +1,296 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package driver provides an external entry point to the pprof driver.
+package driver
+
+import (
+ "io"
+ "net/http"
+ "regexp"
+ "time"
+
+ internaldriver "github.com/google/pprof/internal/driver"
+ "github.com/google/pprof/internal/plugin"
+ "github.com/google/pprof/profile"
+)
+
+// PProf acquires a profile, and symbolizes it using a profile
+// manager. Then it generates a report formatted according to the
+// options selected through the flags package.
+func PProf(o *Options) error {
+ return internaldriver.PProf(o.internalOptions())
+}
+
+func (o *Options) internalOptions() *plugin.Options {
+ var obj plugin.ObjTool
+ if o.Obj != nil {
+ obj = &internalObjTool{o.Obj}
+ }
+ var sym plugin.Symbolizer
+ if o.Sym != nil {
+ sym = &internalSymbolizer{o.Sym}
+ }
+ var httpServer func(args *plugin.HTTPServerArgs) error
+ if o.HTTPServer != nil {
+ httpServer = func(args *plugin.HTTPServerArgs) error {
+ return o.HTTPServer(((*HTTPServerArgs)(args)))
+ }
+ }
+ return &plugin.Options{
+ Writer: o.Writer,
+ Flagset: o.Flagset,
+ Fetch: o.Fetch,
+ Sym: sym,
+ Obj: obj,
+ UI: o.UI,
+ HTTPServer: httpServer,
+ HTTPTransport: o.HTTPTransport,
+ }
+}
+
+// HTTPServerArgs contains arguments needed by an HTTP server that
+// is exporting a pprof web interface.
+type HTTPServerArgs plugin.HTTPServerArgs
+
+// Options groups all the optional plugins into pprof.
+type Options struct {
+ Writer Writer
+ Flagset FlagSet
+ Fetch Fetcher
+ Sym Symbolizer
+ Obj ObjTool
+ UI UI
+ HTTPServer func(*HTTPServerArgs) error
+ HTTPTransport http.RoundTripper
+}
+
+// Writer provides a mechanism to write data under a certain name,
+// typically a filename.
+type Writer interface {
+ Open(name string) (io.WriteCloser, error)
+}
+
+// A FlagSet creates and parses command-line flags.
+// It is similar to the standard flag.FlagSet.
+type FlagSet interface {
+ // Bool, Int, Float64, and String define new flags,
+ // like the functions of the same name in package flag.
+ Bool(name string, def bool, usage string) *bool
+ Int(name string, def int, usage string) *int
+ Float64(name string, def float64, usage string) *float64
+ String(name string, def string, usage string) *string
+
+ // StringList is similar to String but allows multiple values for a
+ // single flag
+ StringList(name string, def string, usage string) *[]*string
+
+ // ExtraUsage returns any additional text that should be printed after the
+ // standard usage message. The extra usage message returned includes all text
+ // added with AddExtraUsage().
+ // The typical use of ExtraUsage is to show any custom flags defined by the
+ // specific pprof plugins being used.
+ ExtraUsage() string
+
+ // AddExtraUsage appends additional text to the end of the extra usage message.
+ AddExtraUsage(eu string)
+
+ // Parse initializes the flags with their values for this run
+ // and returns the non-flag command line arguments.
+ // If an unknown flag is encountered or there are no arguments,
+ // Parse should call usage and return nil.
+ Parse(usage func()) []string
+}
+
+// A Fetcher reads and returns the profile named by src, using
+// the specified duration and timeout. It returns the fetched
+// profile and a string indicating a URL from where the profile
+// was fetched, which may be different than src.
+type Fetcher interface {
+ Fetch(src string, duration, timeout time.Duration) (*profile.Profile, string, error)
+}
+
+// A Symbolizer introduces symbol information into a profile.
+type Symbolizer interface {
+ Symbolize(mode string, srcs MappingSources, prof *profile.Profile) error
+}
+
+// MappingSources map each profile.Mapping to the source of the profile.
+// The key is either Mapping.File or Mapping.BuildId.
+type MappingSources map[string][]struct {
+ Source string // URL of the source the mapping was collected from
+ Start uint64 // delta applied to addresses from this source (to represent Merge adjustments)
+}
+
+// An ObjTool inspects shared libraries and executable files.
+type ObjTool interface {
+ // Open opens the named object file. If the object is a shared
+ // library, start/limit/offset are the addresses where it is mapped
+ // into memory in the address space being inspected.
+ Open(file string, start, limit, offset uint64) (ObjFile, error)
+
+ // Disasm disassembles the named object file, starting at
+ // the start address and stopping at (before) the end address.
+ Disasm(file string, start, end uint64, intelSyntax bool) ([]Inst, error)
+}
+
+// An Inst is a single instruction in an assembly listing.
+type Inst struct {
+ Addr uint64 // virtual address of instruction
+ Text string // instruction text
+ Function string // function name
+ File string // source file
+ Line int // source line
+}
+
+// An ObjFile is a single object file: a shared library or executable.
+type ObjFile interface {
+ // Name returns the underlying file name, if available.
+ Name() string
+
+ // Base returns the base address to use when looking up symbols in the file.
+ Base() uint64
+
+ // BuildID returns the GNU build ID of the file, or an empty string.
+ BuildID() string
+
+ // SourceLine reports the source line information for a given
+ // address in the file. Due to inlining, the source line information
+ // is in general a list of positions representing a call stack,
+ // with the leaf function first.
+ SourceLine(addr uint64) ([]Frame, error)
+
+ // Symbols returns a list of symbols in the object file.
+ // If r is not nil, Symbols restricts the list to symbols
+ // with names matching the regular expression.
+ // If addr is not zero, Symbols restricts the list to symbols
+ // containing that address.
+ Symbols(r *regexp.Regexp, addr uint64) ([]*Sym, error)
+
+ // Close closes the file, releasing associated resources.
+ Close() error
+}
+
+// A Frame describes a single line in a source file.
+type Frame struct {
+ Func string // name of function
+ File string // source file name
+ Line int // line in file
+}
+
+// A Sym describes a single symbol in an object file.
+type Sym struct {
+ Name []string // names of symbol (many if symbol was dedup'ed)
+ File string // object file containing symbol
+ Start uint64 // start virtual address
+ End uint64 // virtual address of last byte in sym (Start+size-1)
+}
+
+// A UI manages user interactions.
+type UI interface {
+ // Read returns a line of text (a command) read from the user.
+ // prompt is printed before reading the command.
+ ReadLine(prompt string) (string, error)
+
+ // Print shows a message to the user.
+ // It formats the text as fmt.Print would and adds a final \n if not already present.
+ // For line-based UI, Print writes to standard error.
+ // (Standard output is reserved for report data.)
+ Print(...interface{})
+
+ // PrintErr shows an error message to the user.
+ // It formats the text as fmt.Print would and adds a final \n if not already present.
+ // For line-based UI, PrintErr writes to standard error.
+ PrintErr(...interface{})
+
+ // IsTerminal returns whether the UI is known to be tied to an
+ // interactive terminal (as opposed to being redirected to a file).
+ IsTerminal() bool
+
+ // WantBrowser indicates whether browser should be opened with the -http option.
+ WantBrowser() bool
+
+ // SetAutoComplete instructs the UI to call complete(cmd) to obtain
+ // the auto-completion of cmd, if the UI supports auto-completion at all.
+ SetAutoComplete(complete func(string) string)
+}
+
+// internalObjTool is a wrapper to map from the pprof external
+// interface to the internal interface.
+type internalObjTool struct {
+ ObjTool
+}
+
+func (o *internalObjTool) Open(file string, start, limit, offset uint64) (plugin.ObjFile, error) {
+ f, err := o.ObjTool.Open(file, start, limit, offset)
+ if err != nil {
+ return nil, err
+ }
+ return &internalObjFile{f}, err
+}
+
+type internalObjFile struct {
+ ObjFile
+}
+
+func (f *internalObjFile) SourceLine(frame uint64) ([]plugin.Frame, error) {
+ frames, err := f.ObjFile.SourceLine(frame)
+ if err != nil {
+ return nil, err
+ }
+ var pluginFrames []plugin.Frame
+ for _, f := range frames {
+ pluginFrames = append(pluginFrames, plugin.Frame(f))
+ }
+ return pluginFrames, nil
+}
+
+func (f *internalObjFile) Symbols(r *regexp.Regexp, addr uint64) ([]*plugin.Sym, error) {
+ syms, err := f.ObjFile.Symbols(r, addr)
+ if err != nil {
+ return nil, err
+ }
+ var pluginSyms []*plugin.Sym
+ for _, s := range syms {
+ ps := plugin.Sym(*s)
+ pluginSyms = append(pluginSyms, &ps)
+ }
+ return pluginSyms, nil
+}
+
+func (o *internalObjTool) Disasm(file string, start, end uint64, intelSyntax bool) ([]plugin.Inst, error) {
+ insts, err := o.ObjTool.Disasm(file, start, end, intelSyntax)
+ if err != nil {
+ return nil, err
+ }
+ var pluginInst []plugin.Inst
+ for _, inst := range insts {
+ pluginInst = append(pluginInst, plugin.Inst(inst))
+ }
+ return pluginInst, nil
+}
+
+// internalSymbolizer is a wrapper to map from the pprof external
+// interface to the internal interface.
+type internalSymbolizer struct {
+ Symbolizer
+}
+
+func (s *internalSymbolizer) Symbolize(mode string, srcs plugin.MappingSources, prof *profile.Profile) error {
+ isrcs := MappingSources{}
+ for m, s := range srcs {
+ isrcs[m] = s
+ }
+ return s.Symbolizer.Symbolize(mode, isrcs, prof)
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/binutils/addr2liner.go b/src/cmd/vendor/github.com/google/pprof/internal/binutils/addr2liner.go
new file mode 100644
index 0000000..c0661bf
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/binutils/addr2liner.go
@@ -0,0 +1,242 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package binutils
+
+import (
+ "bufio"
+ "fmt"
+ "io"
+ "os/exec"
+ "strconv"
+ "strings"
+ "sync"
+
+ "github.com/google/pprof/internal/plugin"
+)
+
+const (
+ defaultAddr2line = "addr2line"
+
+ // addr2line may produce multiple lines of output. We
+ // use this sentinel to identify the end of the output.
+ sentinel = ^uint64(0)
+)
+
+// addr2Liner is a connection to an addr2line command for obtaining
+// address and line number information from a binary.
+type addr2Liner struct {
+ mu sync.Mutex
+ rw lineReaderWriter
+ base uint64
+
+ // nm holds an addr2Liner using nm tool. Certain versions of addr2line
+ // produce incomplete names due to
+ // https://sourceware.org/bugzilla/show_bug.cgi?id=17541. As a workaround,
+ // the names from nm are used when they look more complete. See addrInfo()
+ // code below for the exact heuristic.
+ nm *addr2LinerNM
+}
+
+// lineReaderWriter is an interface to abstract the I/O to an addr2line
+// process. It writes a line of input to the job, and reads its output
+// one line at a time.
+type lineReaderWriter interface {
+ write(string) error
+ readLine() (string, error)
+ close()
+}
+
+type addr2LinerJob struct {
+ cmd *exec.Cmd
+ in io.WriteCloser
+ out *bufio.Reader
+}
+
+func (a *addr2LinerJob) write(s string) error {
+ _, err := fmt.Fprint(a.in, s+"\n")
+ return err
+}
+
+func (a *addr2LinerJob) readLine() (string, error) {
+ return a.out.ReadString('\n')
+}
+
+// close releases any resources used by the addr2liner object.
+func (a *addr2LinerJob) close() {
+ a.in.Close()
+ a.cmd.Wait()
+}
+
+// newAddr2liner starts the given addr2liner command reporting
+// information about the given executable file. If file is a shared
+// library, base should be the address at which it was mapped in the
+// program under consideration.
+func newAddr2Liner(cmd, file string, base uint64) (*addr2Liner, error) {
+ if cmd == "" {
+ cmd = defaultAddr2line
+ }
+
+ j := &addr2LinerJob{
+ cmd: exec.Command(cmd, "-aif", "-e", file),
+ }
+
+ var err error
+ if j.in, err = j.cmd.StdinPipe(); err != nil {
+ return nil, err
+ }
+
+ outPipe, err := j.cmd.StdoutPipe()
+ if err != nil {
+ return nil, err
+ }
+
+ j.out = bufio.NewReader(outPipe)
+ if err := j.cmd.Start(); err != nil {
+ return nil, err
+ }
+
+ a := &addr2Liner{
+ rw: j,
+ base: base,
+ }
+
+ return a, nil
+}
+
+func (d *addr2Liner) readString() (string, error) {
+ s, err := d.rw.readLine()
+ if err != nil {
+ return "", err
+ }
+ return strings.TrimSpace(s), nil
+}
+
+// readFrame parses the addr2line output for a single address. It
+// returns a populated plugin.Frame and whether it has reached the end of the
+// data.
+func (d *addr2Liner) readFrame() (plugin.Frame, bool) {
+ funcname, err := d.readString()
+ if err != nil {
+ return plugin.Frame{}, true
+ }
+ if strings.HasPrefix(funcname, "0x") {
+ // If addr2line returns a hex address we can assume it is the
+ // sentinel. Read and ignore next two lines of output from
+ // addr2line
+ d.readString()
+ d.readString()
+ return plugin.Frame{}, true
+ }
+
+ fileline, err := d.readString()
+ if err != nil {
+ return plugin.Frame{}, true
+ }
+
+ linenumber := 0
+
+ if funcname == "??" {
+ funcname = ""
+ }
+
+ if fileline == "??:0" {
+ fileline = ""
+ } else {
+ if i := strings.LastIndex(fileline, ":"); i >= 0 {
+ // Remove discriminator, if present
+ if disc := strings.Index(fileline, " (discriminator"); disc > 0 {
+ fileline = fileline[:disc]
+ }
+ // If we cannot parse a number after the last ":", keep it as
+ // part of the filename.
+ if line, err := strconv.Atoi(fileline[i+1:]); err == nil {
+ linenumber = line
+ fileline = fileline[:i]
+ }
+ }
+ }
+
+ return plugin.Frame{
+ Func: funcname,
+ File: fileline,
+ Line: linenumber}, false
+}
+
+func (d *addr2Liner) rawAddrInfo(addr uint64) ([]plugin.Frame, error) {
+ d.mu.Lock()
+ defer d.mu.Unlock()
+
+ if err := d.rw.write(fmt.Sprintf("%x", addr-d.base)); err != nil {
+ return nil, err
+ }
+
+ if err := d.rw.write(fmt.Sprintf("%x", sentinel)); err != nil {
+ return nil, err
+ }
+
+ resp, err := d.readString()
+ if err != nil {
+ return nil, err
+ }
+
+ if !strings.HasPrefix(resp, "0x") {
+ return nil, fmt.Errorf("unexpected addr2line output: %s", resp)
+ }
+
+ var stack []plugin.Frame
+ for {
+ frame, end := d.readFrame()
+ if end {
+ break
+ }
+
+ if frame != (plugin.Frame{}) {
+ stack = append(stack, frame)
+ }
+ }
+ return stack, err
+}
+
+// addrInfo returns the stack frame information for a specific program
+// address. It returns nil if the address could not be identified.
+func (d *addr2Liner) addrInfo(addr uint64) ([]plugin.Frame, error) {
+ stack, err := d.rawAddrInfo(addr)
+ if err != nil {
+ return nil, err
+ }
+
+ // Certain versions of addr2line produce incomplete names due to
+ // https://sourceware.org/bugzilla/show_bug.cgi?id=17541. Attempt to replace
+ // the name with a better one from nm.
+ if len(stack) > 0 && d.nm != nil {
+ nm, err := d.nm.addrInfo(addr)
+ if err == nil && len(nm) > 0 {
+ // Last entry in frame list should match since it is non-inlined. As a
+ // simple heuristic, we only switch to the nm-based name if it is longer
+ // by 2 or more characters. We consider nm names that are longer by 1
+ // character insignificant to avoid replacing foo with _foo on MacOS (for
+ // unknown reasons read2line produces the former and nm produces the
+ // latter on MacOS even though both tools are asked to produce mangled
+ // names).
+ nmName := nm[len(nm)-1].Func
+ a2lName := stack[len(stack)-1].Func
+ if len(nmName) > len(a2lName)+1 {
+ stack[len(stack)-1].Func = nmName
+ }
+ }
+ }
+
+ return stack, nil
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/binutils/addr2liner_llvm.go b/src/cmd/vendor/github.com/google/pprof/internal/binutils/addr2liner_llvm.go
new file mode 100644
index 0000000..68fa559
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/binutils/addr2liner_llvm.go
@@ -0,0 +1,175 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package binutils
+
+import (
+ "bufio"
+ "fmt"
+ "io"
+ "os/exec"
+ "strconv"
+ "strings"
+ "sync"
+
+ "github.com/google/pprof/internal/plugin"
+)
+
+const (
+ defaultLLVMSymbolizer = "llvm-symbolizer"
+)
+
+// llvmSymbolizer is a connection to an llvm-symbolizer command for
+// obtaining address and line number information from a binary.
+type llvmSymbolizer struct {
+ sync.Mutex
+ filename string
+ rw lineReaderWriter
+ base uint64
+}
+
+type llvmSymbolizerJob struct {
+ cmd *exec.Cmd
+ in io.WriteCloser
+ out *bufio.Reader
+}
+
+func (a *llvmSymbolizerJob) write(s string) error {
+ _, err := fmt.Fprint(a.in, s+"\n")
+ return err
+}
+
+func (a *llvmSymbolizerJob) readLine() (string, error) {
+ return a.out.ReadString('\n')
+}
+
+// close releases any resources used by the llvmSymbolizer object.
+func (a *llvmSymbolizerJob) close() {
+ a.in.Close()
+ a.cmd.Wait()
+}
+
+// newLLVMSymbolizer starts the given llvm-symbolizer command reporting
+// information about the given executable file. If file is a shared
+// library, base should be the address at which it was mapped in the
+// program under consideration.
+func newLLVMSymbolizer(cmd, file string, base uint64) (*llvmSymbolizer, error) {
+	if cmd == "" {
+		cmd = defaultLLVMSymbolizer
+	}
+
+	j := &llvmSymbolizerJob{
+		cmd: exec.Command(cmd, "-inlining", "-demangle=false"),
+	}
+
+	var err error
+	if j.in, err = j.cmd.StdinPipe(); err != nil {
+		return nil, err
+	}
+
+	outPipe, err := j.cmd.StdoutPipe()
+	if err != nil {
+		return nil, err
+	}
+
+	j.out = bufio.NewReader(outPipe)
+	if err := j.cmd.Start(); err != nil {
+		return nil, err
+	}
+
+	a := &llvmSymbolizer{
+		filename: file,
+		rw:       j,
+		base:     base,
+	}
+
+	return a, nil
+}
+
+func (d *llvmSymbolizer) readString() (string, error) {
+ s, err := d.rw.readLine()
+ if err != nil {
+ return "", err
+ }
+ return strings.TrimSpace(s), nil
+}
+
+// readFrame parses the llvm-symbolizer output for a single address. It
+// returns a populated plugin.Frame and whether it has reached the end of the
+// data.
+func (d *llvmSymbolizer) readFrame() (plugin.Frame, bool) {
+	funcname, err := d.readString()
+	if err != nil {
+		return plugin.Frame{}, true
+	}
+
+	switch funcname {
+	case "":
+		return plugin.Frame{}, true
+	case "??":
+		funcname = ""
+	}
+
+	fileline, err := d.readString()
+	if err != nil {
+		return plugin.Frame{Func: funcname}, true
+	}
+
+	linenumber := 0
+	if fileline == "??:0" {
+		fileline = ""
+	} else {
+		switch split := strings.Split(fileline, ":"); len(split) {
+		case 1:
+			// filename only
+			fileline = split[0]
+		case 2, 3:
+			// filename:line, or
+			// filename:line:discriminator
+			fileline = split[0]
+			if line, err := strconv.Atoi(split[1]); err == nil {
+				linenumber = line
+			}
+		default:
+			// Unrecognized, ignore
+		}
+	}
+
+	return plugin.Frame{Func: funcname, File: fileline, Line: linenumber}, false
+}
+
+// addrInfo returns the stack frame information for a specific program
+// address. It returns nil if the address could not be identified.
+func (d *llvmSymbolizer) addrInfo(addr uint64) ([]plugin.Frame, error) {
+ d.Lock()
+ defer d.Unlock()
+
+ if err := d.rw.write(fmt.Sprintf("%s 0x%x", d.filename, addr-d.base)); err != nil {
+ return nil, err
+ }
+
+ var stack []plugin.Frame
+ for {
+ frame, end := d.readFrame()
+ if end {
+ break
+ }
+
+ if frame != (plugin.Frame{}) {
+ stack = append(stack, frame)
+ }
+ }
+
+ return stack, nil
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/binutils/addr2liner_nm.go b/src/cmd/vendor/github.com/google/pprof/internal/binutils/addr2liner_nm.go
new file mode 100644
index 0000000..1987bd3
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/binutils/addr2liner_nm.go
@@ -0,0 +1,124 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package binutils
+
+import (
+ "bufio"
+ "bytes"
+ "io"
+ "os/exec"
+ "strconv"
+ "strings"
+
+ "github.com/google/pprof/internal/plugin"
+)
+
+const (
+ defaultNM = "nm"
+)
+
+// addr2LinerNM is a connection to an nm command for obtaining address
+// information from a binary.
+type addr2LinerNM struct {
+ m []symbolInfo // Sorted list of addresses from binary.
+}
+
+type symbolInfo struct {
+ address uint64
+ name string
+}
+
+// newAddr2LinerNM starts the given nm command reporting information about the
+// given executable file. If file is a shared library, base should be
+// the address at which it was mapped in the program under
+// consideration.
+func newAddr2LinerNM(cmd, file string, base uint64) (*addr2LinerNM, error) {
+ if cmd == "" {
+ cmd = defaultNM
+ }
+ var b bytes.Buffer
+ c := exec.Command(cmd, "-n", file)
+ c.Stdout = &b
+ if err := c.Run(); err != nil {
+ return nil, err
+ }
+ return parseAddr2LinerNM(base, &b)
+}
+
+func parseAddr2LinerNM(base uint64, nm io.Reader) (*addr2LinerNM, error) {
+ a := &addr2LinerNM{
+ m: []symbolInfo{},
+ }
+
+ // Parse nm output and populate symbol map.
+ // Skip lines we fail to parse.
+ buf := bufio.NewReader(nm)
+ for {
+ line, err := buf.ReadString('\n')
+ if line == "" && err != nil {
+ if err == io.EOF {
+ break
+ }
+ return nil, err
+ }
+ line = strings.TrimSpace(line)
+ fields := strings.SplitN(line, " ", 3)
+ if len(fields) != 3 {
+ continue
+ }
+ address, err := strconv.ParseUint(fields[0], 16, 64)
+ if err != nil {
+ continue
+ }
+ a.m = append(a.m, symbolInfo{
+ address: address + base,
+ name: fields[2],
+ })
+ }
+
+ return a, nil
+}
+
+// addrInfo returns the stack frame information for a specific program
+// address. It returns nil if the address could not be identified.
+func (a *addr2LinerNM) addrInfo(addr uint64) ([]plugin.Frame, error) {
+ if len(a.m) == 0 || addr < a.m[0].address || addr > a.m[len(a.m)-1].address {
+ return nil, nil
+ }
+
+ // Binary search. Search until low, high are separated by 1.
+ low, high := 0, len(a.m)
+ for low+1 < high {
+ mid := (low + high) / 2
+ v := a.m[mid].address
+ if addr == v {
+ low = mid
+ break
+ } else if addr > v {
+ low = mid
+ } else {
+ high = mid
+ }
+ }
+
+ // Address is between a.m[low] and a.m[high].
+ // Pick low, as it represents [low, high).
+ f := []plugin.Frame{
+ {
+ Func: a.m[low].name,
+ },
+ }
+ return f, nil
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/binutils/binutils.go b/src/cmd/vendor/github.com/google/pprof/internal/binutils/binutils.go
new file mode 100644
index 0000000..4b67cc4
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/binutils/binutils.go
@@ -0,0 +1,568 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package binutils provides access to the GNU binutils.
+package binutils
+
+import (
+ "debug/elf"
+ "debug/macho"
+ "encoding/binary"
+ "errors"
+ "fmt"
+ "io"
+ "os"
+ "os/exec"
+ "path/filepath"
+ "regexp"
+ "runtime"
+ "strconv"
+ "strings"
+ "sync"
+
+ "github.com/google/pprof/internal/elfexec"
+ "github.com/google/pprof/internal/plugin"
+)
+
+// A Binutils implements plugin.ObjTool by invoking the GNU binutils.
+type Binutils struct {
+ mu sync.Mutex
+ rep *binrep
+}
+
+var objdumpLLVMVerRE = regexp.MustCompile(`LLVM version (?:(\d*)\.(\d*)\.(\d*)|.*(trunk).*)`)
+
+// binrep is an immutable representation for Binutils. It is atomically
+// replaced on every mutation to provide thread-safe access.
+type binrep struct {
+ // Commands to invoke.
+ llvmSymbolizer string
+ llvmSymbolizerFound bool
+ addr2line string
+ addr2lineFound bool
+ nm string
+ nmFound bool
+ objdump string
+ objdumpFound bool
+ isLLVMObjdump bool
+
+ // if fast, perform symbolization using nm (symbol names only),
+ // instead of file-line detail from the slower addr2line.
+ fast bool
+}
+
+// get returns the current representation for bu, initializing it if necessary.
+func (bu *Binutils) get() *binrep {
+ bu.mu.Lock()
+ r := bu.rep
+ if r == nil {
+ r = &binrep{}
+ initTools(r, "")
+ bu.rep = r
+ }
+ bu.mu.Unlock()
+ return r
+}
+
+// update modifies the rep for bu via the supplied function.
+func (bu *Binutils) update(fn func(r *binrep)) {
+ r := &binrep{}
+ bu.mu.Lock()
+ defer bu.mu.Unlock()
+ if bu.rep == nil {
+ initTools(r, "")
+ } else {
+ *r = *bu.rep
+ }
+ fn(r)
+ bu.rep = r
+}
+
+// String returns string representation of the binutils state for debug logging.
+func (bu *Binutils) String() string {
+ r := bu.get()
+ var llvmSymbolizer, addr2line, nm, objdump string
+ if r.llvmSymbolizerFound {
+ llvmSymbolizer = r.llvmSymbolizer
+ }
+ if r.addr2lineFound {
+ addr2line = r.addr2line
+ }
+ if r.nmFound {
+ nm = r.nm
+ }
+ if r.objdumpFound {
+ objdump = r.objdump
+ }
+ return fmt.Sprintf("llvm-symbolizer=%q addr2line=%q nm=%q objdump=%q fast=%t",
+ llvmSymbolizer, addr2line, nm, objdump, r.fast)
+}
+
+// SetFastSymbolization sets a toggle that makes binutils use fast
+// symbolization (using nm), which is much faster than addr2line but
+// provides only symbol name information (no file/line).
+func (bu *Binutils) SetFastSymbolization(fast bool) {
+ bu.update(func(r *binrep) { r.fast = fast })
+}
+
+// SetTools processes the contents of the tools option. It
+// expects a set of entries separated by commas; each entry is a pair
+// of the form t:path, where path will be searched only for the
+// tool named t. If t is not specified, the path is searched for all
+// tools.
+func (bu *Binutils) SetTools(config string) {
+	bu.update(func(r *binrep) { initTools(r, config) })
+}
+
+func initTools(b *binrep, config string) {
+	// paths collects paths per tool; key "" contains the default.
+	paths := make(map[string][]string)
+	for _, t := range strings.Split(config, ",") {
+		name, path := "", t
+		if ct := strings.SplitN(t, ":", 2); len(ct) == 2 {
+			name, path = ct[0], ct[1]
+		}
+		paths[name] = append(paths[name], path)
+	}
+
+	defaultPath := paths[""]
+	b.llvmSymbolizer, b.llvmSymbolizerFound = chooseExe([]string{"llvm-symbolizer"}, []string{}, append(paths["llvm-symbolizer"], defaultPath...))
+	b.addr2line, b.addr2lineFound = chooseExe([]string{"addr2line"}, []string{"gaddr2line"}, append(paths["addr2line"], defaultPath...))
+	// The "-n" option is supported by LLVM since 2011. The output of llvm-nm
+	// and GNU nm with "-n" option is interchangeable for our purposes, so we do
+	// not need to differentiate them.
+	b.nm, b.nmFound = chooseExe([]string{"llvm-nm", "nm"}, []string{"gnm"}, append(paths["nm"], defaultPath...))
+	b.objdump, b.objdumpFound, b.isLLVMObjdump = findObjdump(append(paths["objdump"], defaultPath...))
+}
+
+// findObjdump finds and returns path to preferred objdump binary.
+// Order of preference is: llvm-objdump, objdump.
+// On MacOS only, also looks for gobjdump with least preference.
+// Accepts a list of paths and returns:
+// a string with path to the preferred objdump binary if found,
+// or an empty string if not found;
+// a boolean indicating if any acceptable objdump was found;
+// a boolean indicating if it is an LLVM objdump.
+func findObjdump(paths []string) (string, bool, bool) {
+	objdumpNames := []string{"llvm-objdump", "objdump"}
+	if runtime.GOOS == "darwin" {
+		objdumpNames = append(objdumpNames, "gobjdump")
+	}
+
+	for _, objdumpName := range objdumpNames {
+		if objdump, objdumpFound := findExe(objdumpName, paths); objdumpFound {
+			cmdOut, err := exec.Command(objdump, "--version").Output()
+			if err != nil {
+				continue
+			}
+			if isLLVMObjdump(string(cmdOut)) {
+				return objdump, true, true
+			}
+			if isBuObjdump(string(cmdOut)) {
+				return objdump, true, false
+			}
+		}
+	}
+	return "", false, false
+}
+
+// chooseExe finds and returns path to preferred binary. names is a list of
+// names to search on both Linux and OSX. osxNames is a list of names specific
+// to OSX. names always has a higher priority than osxNames. The order of
+// the name within each list decides its priority (e.g. the first name has a
+// higher priority than the second name in the list).
+//
+// It returns a string with path to the binary and a boolean indicating if any
+// acceptable binary was found.
+func chooseExe(names, osxNames []string, paths []string) (string, bool) {
+ if runtime.GOOS == "darwin" {
+ names = append(names, osxNames...)
+ }
+ for _, name := range names {
+ if binary, found := findExe(name, paths); found {
+ return binary, true
+ }
+ }
+ return "", false
+}
+
+// isLLVMObjdump accepts the output of an objdump "--version" invocation,
+// and returns a boolean indicating if that output identifies an LLVM
+// objdump binary of an acceptable version.
+func isLLVMObjdump(output string) bool {
+	fields := objdumpLLVMVerRE.FindStringSubmatch(output)
+	if len(fields) != 5 {
+		return false
+	}
+	if fields[4] == "trunk" {
+		return true
+	}
+	verMajor, err := strconv.Atoi(fields[1])
+	if err != nil {
+		return false
+	}
+	verPatch, err := strconv.Atoi(fields[3])
+	if err != nil {
+		return false
+	}
+	if runtime.GOOS == "linux" && verMajor >= 8 {
+		// Ensure LLVM objdump is at least version 8.0 on Linux.
+		// Some flags, like --demangle, and double dashes for options are
+		// not supported by previous versions.
+		return true
+	}
+	if runtime.GOOS == "darwin" {
+		// Ensure LLVM objdump is at least version 10.0.1 on MacOS.
+		return verMajor > 10 || (verMajor == 10 && verPatch >= 1)
+	}
+	return false
+}
+
+// isBuObjdump accepts the output of an objdump "--version" invocation,
+// and returns a boolean indicating if that output identifies a GNU
+// binutils objdump binary. No version check is performed.
+func isBuObjdump(output string) bool {
+	return strings.Contains(output, "GNU objdump")
+}
+
+// findExe looks for an executable command on a set of paths.
+// If it cannot find it, returns cmd and false.
+func findExe(cmd string, paths []string) (string, bool) {
+	for _, p := range paths {
+		cp := filepath.Join(p, cmd)
+		if c, err := exec.LookPath(cp); err == nil {
+			return c, true
+		}
+	}
+	return cmd, false
+}
+
+// Disasm returns the assembly instructions for the specified address range
+// of a binary.
+func (bu *Binutils) Disasm(file string, start, end uint64, intelSyntax bool) ([]plugin.Inst, error) {
+ b := bu.get()
+ if !b.objdumpFound {
+ return nil, errors.New("cannot disasm: no objdump tool available")
+ }
+ args := []string{"--disassemble-all", "--demangle", "--no-show-raw-insn",
+ "--line-numbers", fmt.Sprintf("--start-address=%#x", start),
+ fmt.Sprintf("--stop-address=%#x", end)}
+
+ if intelSyntax {
+ if b.isLLVMObjdump {
+ args = append(args, "--x86-asm-syntax=intel")
+ } else {
+ args = append(args, "-M", "intel")
+ }
+ }
+
+ args = append(args, file)
+ cmd := exec.Command(b.objdump, args...)
+ out, err := cmd.Output()
+ if err != nil {
+ return nil, fmt.Errorf("%v: %v", cmd.Args, err)
+ }
+
+ return disassemble(out)
+}
+
+// Open satisfies the plugin.ObjTool interface.
+func (bu *Binutils) Open(name string, start, limit, offset uint64) (plugin.ObjFile, error) {
+ b := bu.get()
+
+ // Make sure file is a supported executable.
+ // This uses magic numbers, mainly to provide better error messages but
+ // it should also help speed.
+
+ if _, err := os.Stat(name); err != nil {
+ // For testing, do not require file name to exist.
+ if strings.Contains(b.addr2line, "testdata/") {
+ return &fileAddr2Line{file: file{b: b, name: name}}, nil
+ }
+ return nil, err
+ }
+
+ // Read the first 4 bytes of the file.
+
+ f, err := os.Open(name)
+ if err != nil {
+ return nil, fmt.Errorf("error opening %s: %v", name, err)
+ }
+ defer f.Close()
+
+ var header [4]byte
+ if _, err = io.ReadFull(f, header[:]); err != nil {
+ return nil, fmt.Errorf("error reading magic number from %s: %v", name, err)
+ }
+
+ elfMagic := string(header[:])
+
+ // Match against supported file types.
+ if elfMagic == elf.ELFMAG {
+ f, err := b.openELF(name, start, limit, offset)
+ if err != nil {
+ return nil, fmt.Errorf("error reading ELF file %s: %v", name, err)
+ }
+ return f, nil
+ }
+
+ // Mach-O magic numbers can be big or little endian.
+ machoMagicLittle := binary.LittleEndian.Uint32(header[:])
+ machoMagicBig := binary.BigEndian.Uint32(header[:])
+
+ if machoMagicLittle == macho.Magic32 || machoMagicLittle == macho.Magic64 ||
+ machoMagicBig == macho.Magic32 || machoMagicBig == macho.Magic64 {
+ f, err := b.openMachO(name, start, limit, offset)
+ if err != nil {
+ return nil, fmt.Errorf("error reading Mach-O file %s: %v", name, err)
+ }
+ return f, nil
+ }
+ if machoMagicLittle == macho.MagicFat || machoMagicBig == macho.MagicFat {
+ f, err := b.openFatMachO(name, start, limit, offset)
+ if err != nil {
+ return nil, fmt.Errorf("error reading fat Mach-O file %s: %v", name, err)
+ }
+ return f, nil
+ }
+
+ return nil, fmt.Errorf("unrecognized binary format: %s", name)
+}
+
+func (b *binrep) openMachOCommon(name string, of *macho.File, start, limit, offset uint64) (plugin.ObjFile, error) {
+
+ // Subtract the load address of the __TEXT section. Usually 0 for shared
+ // libraries or 0x100000000 for executables. You can check this value by
+ // running `objdump -private-headers <file>`.
+
+ textSegment := of.Segment("__TEXT")
+ if textSegment == nil {
+ return nil, fmt.Errorf("could not identify base for %s: no __TEXT segment", name)
+ }
+ if textSegment.Addr > start {
+ return nil, fmt.Errorf("could not identify base for %s: __TEXT segment address (0x%x) > mapping start address (0x%x)",
+ name, textSegment.Addr, start)
+ }
+
+ base := start - textSegment.Addr
+
+ if b.fast || (!b.addr2lineFound && !b.llvmSymbolizerFound) {
+ return &fileNM{file: file{b: b, name: name, base: base}}, nil
+ }
+ return &fileAddr2Line{file: file{b: b, name: name, base: base}}, nil
+}
+
+func (b *binrep) openFatMachO(name string, start, limit, offset uint64) (plugin.ObjFile, error) {
+ of, err := macho.OpenFat(name)
+ if err != nil {
+ return nil, fmt.Errorf("error parsing %s: %v", name, err)
+ }
+ defer of.Close()
+
+ if len(of.Arches) == 0 {
+ return nil, fmt.Errorf("empty fat Mach-O file: %s", name)
+ }
+
+ var arch macho.Cpu
+ // Use the host architecture.
+ // TODO: This is not ideal because the host architecture may not be the one
+ // that was profiled. E.g. an amd64 host can profile a 386 program.
+ switch runtime.GOARCH {
+ case "386":
+ arch = macho.Cpu386
+ case "amd64", "amd64p32":
+ arch = macho.CpuAmd64
+ case "arm", "armbe", "arm64", "arm64be":
+ arch = macho.CpuArm
+ case "ppc":
+ arch = macho.CpuPpc
+ case "ppc64", "ppc64le":
+ arch = macho.CpuPpc64
+ default:
+ return nil, fmt.Errorf("unsupported host architecture for %s: %s", name, runtime.GOARCH)
+ }
+ for i := range of.Arches {
+ if of.Arches[i].Cpu == arch {
+ return b.openMachOCommon(name, of.Arches[i].File, start, limit, offset)
+ }
+ }
+ return nil, fmt.Errorf("architecture not found in %s: %s", name, runtime.GOARCH)
+}
+
+func (b *binrep) openMachO(name string, start, limit, offset uint64) (plugin.ObjFile, error) {
+ of, err := macho.Open(name)
+ if err != nil {
+ return nil, fmt.Errorf("error parsing %s: %v", name, err)
+ }
+ defer of.Close()
+
+ return b.openMachOCommon(name, of, start, limit, offset)
+}
+
+func (b *binrep) openELF(name string, start, limit, offset uint64) (plugin.ObjFile, error) {
+	ef, err := elf.Open(name)
+	if err != nil {
+		return nil, fmt.Errorf("error parsing %s: %v", name, err)
+	}
+	defer ef.Close()
+
+	var stextOffset *uint64
+	var pageAligned = func(addr uint64) bool { return addr%4096 == 0 }
+	if strings.Contains(name, "vmlinux") || !pageAligned(start) || !pageAligned(limit) || !pageAligned(offset) {
+		// Reading all Symbols is expensive, and we only rarely need it so
+		// we don't want to do it every time. But if _stext happens to be
+		// page-aligned but isn't the same as Vaddr, we would symbolize
+		// wrong. So if the addresses aren't page aligned, or if the name
+		// is "vmlinux" we read _stext. We can be wrong if: (1) someone
+		// passes a kernel path that doesn't contain "vmlinux" AND
+		// (2) _stext is page-aligned AND (3) _stext is not at Vaddr
+		symbols, err := ef.Symbols()
+		if err != nil && err != elf.ErrNoSymbols {
+			return nil, err
+		}
+		for _, s := range symbols {
+			if s.Name == "_stext" {
+				// The kernel may use _stext as the mapping start address.
+				stextOffset = &s.Value
+				break
+			}
+		}
+	}
+
+	base, err := elfexec.GetBase(&ef.FileHeader, elfexec.FindTextProgHeader(ef), stextOffset, start, limit, offset)
+	if err != nil {
+		return nil, fmt.Errorf("could not identify base for %s: %v", name, err)
+	}
+
+	buildID := ""
+	if f, err := os.Open(name); err == nil {
+		if id, err := elfexec.GetBuildID(f); err == nil {
+			buildID = fmt.Sprintf("%x", id)
+		}
+	}
+	if b.fast || (!b.addr2lineFound && !b.llvmSymbolizerFound) {
+		return &fileNM{file: file{b, name, base, buildID}}, nil
+	}
+	return &fileAddr2Line{file: file{b, name, base, buildID}}, nil
+}
+
+// file implements the binutils.ObjFile interface.
+type file struct {
+ b *binrep
+ name string
+ base uint64
+ buildID string
+}
+
+func (f *file) Name() string {
+ return f.name
+}
+
+func (f *file) Base() uint64 {
+ return f.base
+}
+
+func (f *file) BuildID() string {
+ return f.buildID
+}
+
+func (f *file) SourceLine(addr uint64) ([]plugin.Frame, error) {
+ return []plugin.Frame{}, nil
+}
+
+func (f *file) Close() error {
+ return nil
+}
+
+func (f *file) Symbols(r *regexp.Regexp, addr uint64) ([]*plugin.Sym, error) {
+ // Get from nm a list of symbols sorted by address.
+ cmd := exec.Command(f.b.nm, "-n", f.name)
+ out, err := cmd.Output()
+ if err != nil {
+ return nil, fmt.Errorf("%v: %v", cmd.Args, err)
+ }
+
+ return findSymbols(out, f.name, r, addr)
+}
+
+// fileNM implements the binutils.ObjFile interface, using 'nm' to map
+// addresses to symbols (without file/line number information). It is
+// faster than fileAddr2Line.
+type fileNM struct {
+ file
+ addr2linernm *addr2LinerNM
+}
+
+func (f *fileNM) SourceLine(addr uint64) ([]plugin.Frame, error) {
+ if f.addr2linernm == nil {
+ addr2liner, err := newAddr2LinerNM(f.b.nm, f.name, f.base)
+ if err != nil {
+ return nil, err
+ }
+ f.addr2linernm = addr2liner
+ }
+ return f.addr2linernm.addrInfo(addr)
+}
+
+// fileAddr2Line implements the binutils.ObjFile interface, using
+// llvm-symbolizer, if that's available, or addr2line to map addresses to
+// symbols (with file/line number information). It can be slow for large
+// binaries with debug information.
+type fileAddr2Line struct {
+ once sync.Once
+ file
+ addr2liner *addr2Liner
+ llvmSymbolizer *llvmSymbolizer
+}
+
+func (f *fileAddr2Line) SourceLine(addr uint64) ([]plugin.Frame, error) {
+ f.once.Do(f.init)
+ if f.llvmSymbolizer != nil {
+ return f.llvmSymbolizer.addrInfo(addr)
+ }
+ if f.addr2liner != nil {
+ return f.addr2liner.addrInfo(addr)
+ }
+ return nil, fmt.Errorf("could not find local addr2liner")
+}
+
+func (f *fileAddr2Line) init() {
+ if llvmSymbolizer, err := newLLVMSymbolizer(f.b.llvmSymbolizer, f.name, f.base); err == nil {
+ f.llvmSymbolizer = llvmSymbolizer
+ return
+ }
+
+ if addr2liner, err := newAddr2Liner(f.b.addr2line, f.name, f.base); err == nil {
+ f.addr2liner = addr2liner
+
+ // When addr2line encounters some gcc compiled binaries, it
+ // drops interesting parts of names in anonymous namespaces.
+ // Fallback to NM for better function names.
+ if nm, err := newAddr2LinerNM(f.b.nm, f.name, f.base); err == nil {
+ f.addr2liner.nm = nm
+ }
+ }
+}
+
+func (f *fileAddr2Line) Close() error {
+ if f.llvmSymbolizer != nil {
+ f.llvmSymbolizer.rw.close()
+ f.llvmSymbolizer = nil
+ }
+ if f.addr2liner != nil {
+ f.addr2liner.rw.close()
+ f.addr2liner = nil
+ }
+ return nil
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/binutils/disasm.go b/src/cmd/vendor/github.com/google/pprof/internal/binutils/disasm.go
new file mode 100644
index 0000000..d0be614
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/binutils/disasm.go
@@ -0,0 +1,177 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package binutils
+
+import (
+ "bytes"
+ "io"
+ "regexp"
+ "strconv"
+
+ "github.com/google/pprof/internal/plugin"
+ "github.com/ianlancetaylor/demangle"
+)
+
+var (
+ nmOutputRE = regexp.MustCompile(`^\s*([[:xdigit:]]+)\s+(.)\s+(.*)`)
+ objdumpAsmOutputRE = regexp.MustCompile(`^\s*([[:xdigit:]]+):\s+(.*)`)
+ objdumpOutputFileLine = regexp.MustCompile(`^;?\s?(.*):([0-9]+)`)
+ objdumpOutputFunction = regexp.MustCompile(`^;?\s?(\S.*)\(\):`)
+ objdumpOutputFunctionLLVM = regexp.MustCompile(`^([[:xdigit:]]+)?\s?(.*):`)
+)
+
+func findSymbols(syms []byte, file string, r *regexp.Regexp, address uint64) ([]*plugin.Sym, error) {
+ // Collect all symbols from the nm output, grouping names mapped to
+ // the same address into a single symbol.
+
+ // The symbols to return.
+ var symbols []*plugin.Sym
+
+ // The current group of symbol names, and the address they are all at.
+ names, start := []string{}, uint64(0)
+
+ buf := bytes.NewBuffer(syms)
+
+ for {
+ symAddr, name, err := nextSymbol(buf)
+ if err == io.EOF {
+ // Done. If there was an unfinished group, append it.
+ if len(names) != 0 {
+ if match := matchSymbol(names, start, symAddr-1, r, address); match != nil {
+ symbols = append(symbols, &plugin.Sym{Name: match, File: file, Start: start, End: symAddr - 1})
+ }
+ }
+
+ // And return the symbols.
+ return symbols, nil
+ }
+
+ if err != nil {
+ // There was some kind of serious error reading nm's output.
+ return nil, err
+ }
+
+ // If this symbol is at the same address as the current group, add it to the group.
+ if symAddr == start {
+ names = append(names, name)
+ continue
+ }
+
+ // Otherwise append the current group to the list of symbols.
+ if match := matchSymbol(names, start, symAddr-1, r, address); match != nil {
+ symbols = append(symbols, &plugin.Sym{Name: match, File: file, Start: start, End: symAddr - 1})
+ }
+
+ // And start a new group.
+ names, start = []string{name}, symAddr
+ }
+}
+
+// matchSymbol checks if a symbol is to be selected by checking its
+// name to the regexp and optionally its address. It returns the name(s)
+// to be used for the matched symbol, or nil if no match
+func matchSymbol(names []string, start, end uint64, r *regexp.Regexp, address uint64) []string {
+ if address != 0 && address >= start && address <= end {
+ return names
+ }
+ for _, name := range names {
+ if r == nil || r.MatchString(name) {
+ return []string{name}
+ }
+
+ // Match all possible demangled versions of the name.
+ for _, o := range [][]demangle.Option{
+ {demangle.NoClones},
+ {demangle.NoParams},
+ {demangle.NoParams, demangle.NoTemplateParams},
+ } {
+ if demangled, err := demangle.ToString(name, o...); err == nil && r.MatchString(demangled) {
+ return []string{demangled}
+ }
+ }
+ }
+ return nil
+}
+
+// disassemble parses the output of the objdump command and returns
+// the assembly instructions in a slice.
+func disassemble(asm []byte) ([]plugin.Inst, error) {
+ buf := bytes.NewBuffer(asm)
+ function, file, line := "", "", 0
+ var assembly []plugin.Inst
+ for {
+ input, err := buf.ReadString('\n')
+ if err != nil {
+ if err != io.EOF {
+ return nil, err
+ }
+ if input == "" {
+ break
+ }
+ }
+
+ if fields := objdumpAsmOutputRE.FindStringSubmatch(input); len(fields) == 3 {
+ if address, err := strconv.ParseUint(fields[1], 16, 64); err == nil {
+ assembly = append(assembly,
+ plugin.Inst{
+ Addr: address,
+ Text: fields[2],
+ Function: function,
+ File: file,
+ Line: line,
+ })
+ continue
+ }
+ }
+ if fields := objdumpOutputFileLine.FindStringSubmatch(input); len(fields) == 3 {
+ if l, err := strconv.ParseUint(fields[2], 10, 32); err == nil {
+ file, line = fields[1], int(l)
+ }
+ continue
+ }
+ if fields := objdumpOutputFunction.FindStringSubmatch(input); len(fields) == 2 {
+ function = fields[1]
+ continue
+ } else {
+ if fields := objdumpOutputFunctionLLVM.FindStringSubmatch(input); len(fields) == 3 {
+ function = fields[2]
+ continue
+ }
+ }
+ // Reset on unrecognized lines.
+ function, file, line = "", "", 0
+ }
+
+ return assembly, nil
+}
+
+// nextSymbol parses the nm output to find the next symbol listed.
+// Skips over any output it cannot recognize.
+func nextSymbol(buf *bytes.Buffer) (uint64, string, error) {
+ for {
+ line, err := buf.ReadString('\n')
+ if err != nil {
+ if err != io.EOF || line == "" {
+ return 0, "", err
+ }
+ }
+
+ if fields := nmOutputRE.FindStringSubmatch(line); len(fields) == 4 {
+ if address, err := strconv.ParseUint(fields[1], 16, 64); err == nil {
+ return address, fields[3], nil
+ }
+ }
+ }
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/driver/cli.go b/src/cmd/vendor/github.com/google/pprof/internal/driver/cli.go
new file mode 100644
index 0000000..492400c
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/driver/cli.go
@@ -0,0 +1,367 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package driver
+
+import (
+ "errors"
+ "fmt"
+ "os"
+ "strings"
+
+ "github.com/google/pprof/internal/binutils"
+ "github.com/google/pprof/internal/plugin"
+)
+
// source collects the profile sources and the fetch/symbolization
// options gathered from the command line by parseFlags.
type source struct {
	Sources   []string // Locations (paths or URLs) of the profiles to fetch
	ExecName  string   // Executable recognized as the first positional argument
	BuildID   string   // Build id override for the first mapping
	Base      []string // Base profiles for subtraction or comparison
	DiffBase  bool     // Whether Base holds diff bases rather than subtraction bases
	Normalize bool     // Scale the profile by the base profile before subtraction

	Seconds            int    // Length of time for dynamic profile collection (-1: unset)
	Timeout            int    // Fetch timeout in seconds (-1: unset)
	Symbolize          string // Symbolization options (see -symbolize usage text)
	HTTPHostport       string // host:port for the interactive web UI ("" disables it)
	HTTPDisableBrowser bool   // Skip opening a browser for the web UI
	Comment            string // Free-form annotation to record in the profile
}
+
+// parseFlags parses the command lines through the specified flags package
+// and returns the source of the profile and optionally the command
+// for the kind of report to generate (nil for interactive use).
+func parseFlags(o *plugin.Options) (*source, []string, error) {
+ flag := o.Flagset
+ // Comparisons.
+ flagDiffBase := flag.StringList("diff_base", "", "Source of base profile for comparison")
+ flagBase := flag.StringList("base", "", "Source of base profile for profile subtraction")
+ // Source options.
+ flagSymbolize := flag.String("symbolize", "", "Options for profile symbolization")
+ flagBuildID := flag.String("buildid", "", "Override build id for first mapping")
+ flagTimeout := flag.Int("timeout", -1, "Timeout in seconds for fetching a profile")
+ flagAddComment := flag.String("add_comment", "", "Annotation string to record in the profile")
+ // CPU profile options
+ flagSeconds := flag.Int("seconds", -1, "Length of time for dynamic profiles")
+ // Heap profile options
+ flagInUseSpace := flag.Bool("inuse_space", false, "Display in-use memory size")
+ flagInUseObjects := flag.Bool("inuse_objects", false, "Display in-use object counts")
+ flagAllocSpace := flag.Bool("alloc_space", false, "Display allocated memory size")
+ flagAllocObjects := flag.Bool("alloc_objects", false, "Display allocated object counts")
+ // Contention profile options
+ flagTotalDelay := flag.Bool("total_delay", false, "Display total delay at each region")
+ flagContentions := flag.Bool("contentions", false, "Display number of delays at each region")
+ flagMeanDelay := flag.Bool("mean_delay", false, "Display mean delay at each region")
+ flagTools := flag.String("tools", os.Getenv("PPROF_TOOLS"), "Path for object tool pathnames")
+
+ flagHTTP := flag.String("http", "", "Present interactive web UI at the specified http host:port")
+ flagNoBrowser := flag.Bool("no_browser", false, "Skip opening a browswer for the interactive web UI")
+
+ // Flags that set configuration properties.
+ cfg := currentConfig()
+ configFlagSetter := installConfigFlags(flag, &cfg)
+
+ flagCommands := make(map[string]*bool)
+ flagParamCommands := make(map[string]*string)
+ for name, cmd := range pprofCommands {
+ if cmd.hasParam {
+ flagParamCommands[name] = flag.String(name, "", "Generate a report in "+name+" format, matching regexp")
+ } else {
+ flagCommands[name] = flag.Bool(name, false, "Generate a report in "+name+" format")
+ }
+ }
+
+ args := flag.Parse(func() {
+ o.UI.Print(usageMsgHdr +
+ usage(true) +
+ usageMsgSrc +
+ flag.ExtraUsage() +
+ usageMsgVars)
+ })
+ if len(args) == 0 {
+ return nil, nil, errors.New("no profile source specified")
+ }
+
+ var execName string
+ // Recognize first argument as an executable or buildid override.
+ if len(args) > 1 {
+ arg0 := args[0]
+ if file, err := o.Obj.Open(arg0, 0, ^uint64(0), 0); err == nil {
+ file.Close()
+ execName = arg0
+ args = args[1:]
+ } else if *flagBuildID == "" && isBuildID(arg0) {
+ *flagBuildID = arg0
+ args = args[1:]
+ }
+ }
+
+ // Apply any specified flags to cfg.
+ if err := configFlagSetter(); err != nil {
+ return nil, nil, err
+ }
+
+ cmd, err := outputFormat(flagCommands, flagParamCommands)
+ if err != nil {
+ return nil, nil, err
+ }
+ if cmd != nil && *flagHTTP != "" {
+ return nil, nil, errors.New("-http is not compatible with an output format on the command line")
+ }
+
+ if *flagNoBrowser && *flagHTTP == "" {
+ return nil, nil, errors.New("-no_browser only makes sense with -http")
+ }
+
+ si := cfg.SampleIndex
+ si = sampleIndex(flagTotalDelay, si, "delay", "-total_delay", o.UI)
+ si = sampleIndex(flagMeanDelay, si, "delay", "-mean_delay", o.UI)
+ si = sampleIndex(flagContentions, si, "contentions", "-contentions", o.UI)
+ si = sampleIndex(flagInUseSpace, si, "inuse_space", "-inuse_space", o.UI)
+ si = sampleIndex(flagInUseObjects, si, "inuse_objects", "-inuse_objects", o.UI)
+ si = sampleIndex(flagAllocSpace, si, "alloc_space", "-alloc_space", o.UI)
+ si = sampleIndex(flagAllocObjects, si, "alloc_objects", "-alloc_objects", o.UI)
+ cfg.SampleIndex = si
+
+ if *flagMeanDelay {
+ cfg.Mean = true
+ }
+
+ source := &source{
+ Sources: args,
+ ExecName: execName,
+ BuildID: *flagBuildID,
+ Seconds: *flagSeconds,
+ Timeout: *flagTimeout,
+ Symbolize: *flagSymbolize,
+ HTTPHostport: *flagHTTP,
+ HTTPDisableBrowser: *flagNoBrowser,
+ Comment: *flagAddComment,
+ }
+
+ if err := source.addBaseProfiles(*flagBase, *flagDiffBase); err != nil {
+ return nil, nil, err
+ }
+
+ normalize := cfg.Normalize
+ if normalize && len(source.Base) == 0 {
+ return nil, nil, errors.New("must have base profile to normalize by")
+ }
+ source.Normalize = normalize
+
+ if bu, ok := o.Obj.(*binutils.Binutils); ok {
+ bu.SetTools(*flagTools)
+ }
+
+ setCurrentConfig(cfg)
+ return source, cmd, nil
+}
+
+// addBaseProfiles adds the list of base profiles or diff base profiles to
+// the source. This function will return an error if both base and diff base
+// profiles are specified.
+func (source *source) addBaseProfiles(flagBase, flagDiffBase []*string) error {
+ base, diffBase := dropEmpty(flagBase), dropEmpty(flagDiffBase)
+ if len(base) > 0 && len(diffBase) > 0 {
+ return errors.New("-base and -diff_base flags cannot both be specified")
+ }
+
+ source.Base = base
+ if len(diffBase) > 0 {
+ source.Base, source.DiffBase = diffBase, true
+ }
+ return nil
+}
+
// dropEmpty takes a slice of string pointers and returns the non-empty
// strings they point at, preserving order. A nil result means no
// non-empty values were found.
func dropEmpty(list []*string) []string {
	var nonEmpty []string
	for i := range list {
		if s := *list[i]; s != "" {
			nonEmpty = append(nonEmpty, s)
		}
	}
	return nonEmpty
}
+
// installConfigFlags creates command line flags for configuration
// fields and returns a function which can be called after flags have
// been parsed to copy any flags specified on the command line to
// *cfg.
func installConfigFlags(flag plugin.FlagSet, cfg *config) func() error {
	// List of functions for setting the different parts of a config.
	var setters []func()
	var err error // Holds any errors encountered while running setters.

	for _, field := range configFields {
		n := field.name
		help := configHelp[n]
		var setter func()
		// fieldPtr is assumed to return a pointer into *cfg for this
		// field; each case captures that typed pointer plus the flag
		// value in the setter closure.
		switch ptr := cfg.fieldPtr(field).(type) {
		case *bool:
			f := flag.Bool(n, *ptr, help)
			setter = func() { *ptr = *f }
		case *int:
			f := flag.Int(n, *ptr, help)
			setter = func() { *ptr = *f }
		case *float64:
			f := flag.Float64(n, *ptr, help)
			setter = func() { *ptr = *f }
		case *string:
			if len(field.choices) == 0 {
				f := flag.String(n, *ptr, help)
				setter = func() { *ptr = *f }
			} else {
				// Make a separate flag per possible choice.
				// Set all flags to initially false so we can
				// identify conflicts.
				bools := make(map[string]*bool)
				for _, choice := range field.choices {
					bools[choice] = flag.Bool(choice, false, configHelp[choice])
				}
				setter = func() {
					var set []string
					for k, v := range bools {
						if *v {
							set = append(set, k)
						}
					}
					switch len(set) {
					case 0:
						// Leave as default value.
					case 1:
						*ptr = set[0]
					default:
						// Multiple mutually-exclusive choice flags were
						// supplied; stash the error for the caller.
						err = fmt.Errorf("conflicting options set: %v", set)
					}
				}
			}
		}
		setters = append(setters, setter)
	}

	return func() error {
		// Apply the setter for every flag, stopping at the first
		// recorded conflict.
		for _, setter := range setters {
			setter()
			if err != nil {
				return err
			}
		}
		return nil
	}
}
+
// isBuildID determines if the profile may contain a build ID, by
// checking that it is a string of hex digits. The empty string is
// considered a (trivial) build ID, matching the original Trim-based
// behavior.
func isBuildID(id string) bool {
	return strings.IndexFunc(id, func(r rune) bool {
		return !strings.ContainsRune("0123456789abcdefABCDEF", r)
	}) < 0
}
+
+func sampleIndex(flag *bool, si string, sampleType, option string, ui plugin.UI) string {
+ if *flag {
+ if si == "" {
+ return sampleType
+ }
+ ui.PrintErr("Multiple value selections, ignoring ", option)
+ }
+ return si
+}
+
// outputFormat returns the report command selected on the command
// line, as the command name optionally followed by its regexp
// parameter. At most one format may be selected; nil means none was.
func outputFormat(bcmd map[string]*bool, acmd map[string]*string) (cmd []string, err error) {
	set := func(c []string) error {
		if cmd != nil {
			return errors.New("must set at most one output format")
		}
		cmd = c
		return nil
	}
	for name, chosen := range bcmd {
		if *chosen {
			if err := set([]string{name}); err != nil {
				return nil, err
			}
		}
	}
	for name, param := range acmd {
		if *param != "" {
			if err := set([]string{name, *param}); err != nil {
				return nil, err
			}
		}
	}
	return cmd, nil
}
+
// usageMsgHdr is the header of the usage message printed by
// parseFlags' usage callback, before the generated flag listing.
var usageMsgHdr = `usage:

Produce output in the specified format.

 pprof <format> [options] [binary] <source> ...

Omit the format to get an interactive shell whose commands can be used
to generate various views of a profile

 pprof [options] [binary] <source> ...

Omit the format and provide the "-http" flag to get an interactive web
interface at the specified host:port that can be used to navigate through
various views of a profile.

 pprof -http [host]:[port] [options] [binary] <source> ...

Details:
`

// usageMsgSrc describes the profile-source options; it is printed
// after the generated flag listing.
var usageMsgSrc = "\n\n" +
	" Source options:\n" +
	" -seconds Duration for time-based profile collection\n" +
	" -timeout Timeout in seconds for profile collection\n" +
	" -buildid Override build id for main binary\n" +
	" -add_comment Free-form annotation to add to the profile\n" +
	" Displayed on some reports or with pprof -comments\n" +
	" -diff_base source Source of base profile for comparison\n" +
	" -base source Source of base profile for profile subtraction\n" +
	" profile.pb.gz Profile in compressed protobuf format\n" +
	" legacy_profile Profile in legacy pprof format\n" +
	" http://host/profile URL for profile handler to retrieve\n" +
	" -symbolize= Controls source of symbol information\n" +
	" none Do not attempt symbolization\n" +
	" local Examine only local binaries\n" +
	" fastlocal Only get function names from local binaries\n" +
	" remote Do not examine local binaries\n" +
	" force Force re-symbolization\n" +
	" Binary Local path or build id of binary for symbolization\n"

// usageMsgVars describes miscellaneous options, legacy convenience
// flags and environment variables; it closes the usage message.
var usageMsgVars = "\n\n" +
	" Misc options:\n" +
	" -http Provide web interface at host:port.\n" +
	" Host is optional and 'localhost' by default.\n" +
	" Port is optional and a randomly available port by default.\n" +
	" -no_browser Skip opening a browser for the interactive web UI.\n" +
	" -tools Search path for object tools\n" +
	"\n" +
	" Legacy convenience options:\n" +
	" -inuse_space Same as -sample_index=inuse_space\n" +
	" -inuse_objects Same as -sample_index=inuse_objects\n" +
	" -alloc_space Same as -sample_index=alloc_space\n" +
	" -alloc_objects Same as -sample_index=alloc_objects\n" +
	" -total_delay Same as -sample_index=delay\n" +
	" -contentions Same as -sample_index=contentions\n" +
	" -mean_delay Same as -mean -sample_index=delay\n" +
	"\n" +
	" Environment Variables:\n" +
	" PPROF_TMPDIR Location for saved profiles (default $HOME/pprof)\n" +
	" PPROF_TOOLS Search path for object-level tools\n" +
	" PPROF_BINARY_PATH Search path for local binary files\n" +
	" default: $HOME/pprof/binaries\n" +
	" searches $name, $path, $buildid/$name, $path/$buildid\n" +
	" * On Windows, %USERPROFILE% is used instead of $HOME"
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/driver/commands.go b/src/cmd/vendor/github.com/google/pprof/internal/driver/commands.go
new file mode 100644
index 0000000..4397e25
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/driver/commands.go
@@ -0,0 +1,451 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package driver
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "os"
+ "os/exec"
+ "runtime"
+ "sort"
+ "strings"
+ "time"
+
+ "github.com/google/pprof/internal/plugin"
+ "github.com/google/pprof/internal/report"
+)
+
// commands describes the commands accepted by pprof.
type commands map[string]*command

// command describes the actions for a pprof command. Includes a
// function for command-line completion, the report format to use
// during report generation, any postprocessing functions, and whether
// the command expects a regexp parameter (typically a function name).
// Both postProcess and visualizer are optional and may be nil (see the
// entries in pprofCommands).
type command struct {
	format      int           // report format to generate
	postProcess PostProcessor // postprocessing to run on report
	visualizer  PostProcessor // display output using some callback
	hasParam    bool          // collect a parameter from the CLI
	description string        // single-line description text saying what the command does
	usage       string        // multi-line help text saying how the command is used
}
+
+// help returns a help string for a command.
+func (c *command) help(name string) string {
+ message := c.description + "\n"
+ if c.usage != "" {
+ message += " Usage:\n"
+ lines := strings.Split(c.usage, "\n")
+ for _, line := range lines {
+ message += fmt.Sprintf(" %s\n", line)
+ }
+ }
+ return message + "\n"
+}
+
// AddCommand adds an additional command to the set of commands
// accepted by pprof. This enables extensions to add new commands for
// specialized visualization formats. If the command specified already
// exists, it is overwritten.
func AddCommand(cmd string, format int, post PostProcessor, desc, usage string) {
	// Externally added commands never take a regexp parameter and have
	// no separate visualizer step.
	pprofCommands[cmd] = &command{format, post, nil, false, desc, usage}
}
+
// SetVariableDefault sets the default value for a pprof
// variable. This enables extensions to set their own defaults.
// It delegates to configure, defined in this package's config handling.
func SetVariableDefault(variable, value string) {
	configure(variable, value)
}
+
// PostProcessor is a function that applies post-processing to the report output.
// It reads the generated report from input, writes the processed result
// to output, and reports problems through ui.
type PostProcessor func(input io.Reader, output io.Writer, ui plugin.UI) error

// interactiveMode is true if pprof is running on interactive mode, reading
// commands from its shell.
var interactiveMode = false
+
// pprofCommands are the report generation commands recognized by pprof.
// Each entry supplies, in order: the report format, an optional
// post-processing step, an optional visualization step, whether the
// command takes a regexp parameter, and its help text.
var pprofCommands = commands{
	// Commands that require no post-processing.
	"comments": {report.Comments, nil, nil, false, "Output all profile comments", ""},
	"disasm":   {report.Dis, nil, nil, true, "Output assembly listings annotated with samples", listHelp("disasm", true)},
	"dot":      {report.Dot, nil, nil, false, "Outputs a graph in DOT format", reportHelp("dot", false, true)},
	"list":     {report.List, nil, nil, true, "Output annotated source for functions matching regexp", listHelp("list", false)},
	"peek":     {report.Tree, nil, nil, true, "Output callers/callees of functions matching regexp", "peek func_regex\nDisplay callers and callees of functions matching func_regex."},
	"raw":      {report.Raw, nil, nil, false, "Outputs a text representation of the raw profile", ""},
	"tags":     {report.Tags, nil, nil, false, "Outputs all tags in the profile", "tags [tag_regex]* [-ignore_regex]* [>file]\nList tags with key:value matching tag_regex and exclude ignore_regex."},
	"text":     {report.Text, nil, nil, false, "Outputs top entries in text form", reportHelp("text", true, true)},
	"top":      {report.Text, nil, nil, false, "Outputs top entries in text form", reportHelp("top", true, true)},
	"traces":   {report.Traces, nil, nil, false, "Outputs all profile samples in text form", ""},
	"tree":     {report.Tree, nil, nil, false, "Outputs a text rendering of call graph", reportHelp("tree", true, true)},

	// Save binary formats to a file
	"callgrind": {report.Callgrind, nil, awayFromTTY("callgraph.out"), false, "Outputs a graph in callgrind format", reportHelp("callgrind", false, true)},
	"proto":     {report.Proto, nil, awayFromTTY("pb.gz"), false, "Outputs the profile in compressed protobuf format", ""},
	"topproto":  {report.TopProto, nil, awayFromTTY("pb.gz"), false, "Outputs top entries in compressed protobuf format", ""},

	// Generate report in DOT format and postprocess with dot
	"gif": {report.Dot, invokeDot("gif"), awayFromTTY("gif"), false, "Outputs a graph image in GIF format", reportHelp("gif", false, true)},
	"pdf": {report.Dot, invokeDot("pdf"), awayFromTTY("pdf"), false, "Outputs a graph in PDF format", reportHelp("pdf", false, true)},
	"png": {report.Dot, invokeDot("png"), awayFromTTY("png"), false, "Outputs a graph image in PNG format", reportHelp("png", false, true)},
	"ps":  {report.Dot, invokeDot("ps"), awayFromTTY("ps"), false, "Outputs a graph in PS format", reportHelp("ps", false, true)},

	// Save SVG output into a file
	"svg": {report.Dot, massageDotSVG(), awayFromTTY("svg"), false, "Outputs a graph in SVG format", reportHelp("svg", false, true)},

	// Visualize postprocessed dot output
	"eog":    {report.Dot, invokeDot("svg"), invokeVisualizer("svg", []string{"eog"}), false, "Visualize graph through eog", reportHelp("eog", false, false)},
	"evince": {report.Dot, invokeDot("pdf"), invokeVisualizer("pdf", []string{"evince"}), false, "Visualize graph through evince", reportHelp("evince", false, false)},
	"gv":     {report.Dot, invokeDot("ps"), invokeVisualizer("ps", []string{"gv --noantialias"}), false, "Visualize graph through gv", reportHelp("gv", false, false)},
	"web":    {report.Dot, massageDotSVG(), invokeVisualizer("svg", browsers()), false, "Visualize graph through web browser", reportHelp("web", false, false)},

	// Visualize callgrind output
	"kcachegrind": {report.Callgrind, nil, invokeVisualizer("grind", kcachegrind), false, "Visualize report in KCachegrind", reportHelp("kcachegrind", false, false)},

	// Visualize HTML directly generated by report.
	"weblist": {report.WebList, nil, invokeVisualizer("html", browsers()), true, "Display annotated source in a web browser", listHelp("weblist", false)},
}
+
// configHelp contains help text per configuration parameter. Keys match
// the flag/variable name of each config field (and, for multi-choice
// string options, each individual choice). Multi-line entries are
// assembled with helpText.
var configHelp = map[string]string{
	// Filename for file-based output formats, stdout by default.
	"output": helpText("Output filename for file-based outputs"),

	// Comparisons.
	"drop_negative": helpText(
		"Ignore negative differences",
		"Do not show any locations with values <0."),

	// Graph handling options.
	"call_tree": helpText(
		"Create a context-sensitive call tree",
		"Treat locations reached through different paths as separate."),

	// Display options.
	"relative_percentages": helpText(
		"Show percentages relative to focused subgraph",
		"If unset, percentages are relative to full graph before focusing",
		"to facilitate comparison with original graph."),
	"unit": helpText(
		"Measurement units to display",
		"Scale the sample values to this unit.",
		"For time-based profiles, use seconds, milliseconds, nanoseconds, etc.",
		"For memory profiles, use megabytes, kilobytes, bytes, etc.",
		"Using auto will scale each value independently to the most natural unit."),
	"compact_labels": "Show minimal headers",
	"source_path":    "Search path for source files",
	"trim_path":      "Path to trim from source paths before search",
	"intel_syntax": helpText(
		"Show assembly in Intel syntax",
		"Only applicable to commands `disasm` and `weblist`"),

	// Filtering options
	"nodecount": helpText(
		"Max number of nodes to show",
		"Uses heuristics to limit the number of locations to be displayed.",
		"On graphs, dotted edges represent paths through nodes that have been removed."),
	"nodefraction": "Hide nodes below <f>*total",
	"edgefraction": "Hide edges below <f>*total",
	"trim": helpText(
		"Honor nodefraction/edgefraction/nodecount defaults",
		"Set to false to get the full profile, without any trimming."),
	"focus": helpText(
		"Restricts to samples going through a node matching regexp",
		"Discard samples that do not include a node matching this regexp.",
		"Matching includes the function name, filename or object name."),
	"ignore": helpText(
		"Skips paths going through any nodes matching regexp",
		"If set, discard samples that include a node matching this regexp.",
		"Matching includes the function name, filename or object name."),
	"prune_from": helpText(
		"Drops any functions below the matched frame.",
		"If set, any frames matching the specified regexp and any frames",
		"below it will be dropped from each sample."),
	"hide": helpText(
		"Skips nodes matching regexp",
		"Discard nodes that match this location.",
		"Other nodes from samples that include this location will be shown.",
		"Matching includes the function name, filename or object name."),
	"show": helpText(
		"Only show nodes matching regexp",
		"If set, only show nodes that match this location.",
		"Matching includes the function name, filename or object name."),
	"show_from": helpText(
		"Drops functions above the highest matched frame.",
		"If set, all frames above the highest match are dropped from every sample.",
		"Matching includes the function name, filename or object name."),
	"tagfocus": helpText(
		"Restricts to samples with tags in range or matched by regexp",
		"Use name=value syntax to limit the matching to a specific tag.",
		"Numeric tag filter examples: 1kb, 1kb:10kb, memory=32mb:",
		"String tag filter examples: foo, foo.*bar, mytag=foo.*bar"),
	"tagignore": helpText(
		"Discard samples with tags in range or matched by regexp",
		"Use name=value syntax to limit the matching to a specific tag.",
		"Numeric tag filter examples: 1kb, 1kb:10kb, memory=32mb:",
		"String tag filter examples: foo, foo.*bar, mytag=foo.*bar"),
	"tagshow": helpText(
		"Only consider tags matching this regexp",
		"Discard tags that do not match this regexp"),
	"taghide": helpText(
		"Skip tags matching this regexp",
		"Discard tags that match this regexp"),
	// Heap profile options
	"divide_by": helpText(
		"Ratio to divide all samples before visualization",
		"Divide all samples values by a constant, eg the number of processors or jobs."),
	"mean": helpText(
		"Average sample value over first value (count)",
		"For memory profiles, report average memory per allocation.",
		"For time-based profiles, report average time per event."),
	"sample_index": helpText(
		"Sample value to report (0-based index or name)",
		"Profiles contain multiple values per sample.",
		"Use sample_index=i to select the ith value (starting at 0)."),
	"normalize": helpText(
		"Scales profile based on the base profile."),

	// Data sorting criteria
	"flat": helpText("Sort entries based on own weight"),
	"cum":  helpText("Sort entries based on cumulative weight"),

	// Output granularity
	"functions": helpText(
		"Aggregate at the function level.",
		"Ignores the filename where the function was defined."),
	"filefunctions": helpText(
		"Aggregate at the function level.",
		"Takes into account the filename where the function was defined."),
	"files": "Aggregate at the file level.",
	"lines": "Aggregate at the source code line level.",
	"addresses": helpText(
		"Aggregate at the address level.",
		"Includes functions' addresses in the output."),
	"noinlines": helpText(
		"Ignore inlines.",
		"Attributes inlined functions to their first out-of-line caller."),
}
+
// helpText joins the given lines with newlines and appends a trailing
// newline. With no arguments it returns "\n", matching Join's behavior
// on an empty slice.
func helpText(s ...string) string {
	var b strings.Builder
	b.WriteString(strings.Join(s, "\n"))
	b.WriteString("\n")
	return b.String()
}
+
+// usage returns a string describing the pprof commands and configuration
+// options. if commandLine is set, the output reflect cli usage.
+func usage(commandLine bool) string {
+ var prefix string
+ if commandLine {
+ prefix = "-"
+ }
+ fmtHelp := func(c, d string) string {
+ return fmt.Sprintf(" %-16s %s", c, strings.SplitN(d, "\n", 2)[0])
+ }
+
+ var commands []string
+ for name, cmd := range pprofCommands {
+ commands = append(commands, fmtHelp(prefix+name, cmd.description))
+ }
+ sort.Strings(commands)
+
+ var help string
+ if commandLine {
+ help = " Output formats (select at most one):\n"
+ } else {
+ help = " Commands:\n"
+ commands = append(commands, fmtHelp("o/options", "List options and their current values"))
+ commands = append(commands, fmtHelp("q/quit/exit/^D", "Exit pprof"))
+ }
+
+ help = help + strings.Join(commands, "\n") + "\n\n" +
+ " Options:\n"
+
+ // Print help for configuration options after sorting them.
+ // Collect choices for multi-choice options print them together.
+ var variables []string
+ var radioStrings []string
+ for _, f := range configFields {
+ if len(f.choices) == 0 {
+ variables = append(variables, fmtHelp(prefix+f.name, configHelp[f.name]))
+ continue
+ }
+ // Format help for for this group.
+ s := []string{fmtHelp(f.name, "")}
+ for _, choice := range f.choices {
+ s = append(s, " "+fmtHelp(prefix+choice, configHelp[choice]))
+ }
+ radioStrings = append(radioStrings, strings.Join(s, "\n"))
+ }
+ sort.Strings(variables)
+ sort.Strings(radioStrings)
+ return help + strings.Join(variables, "\n") + "\n\n" +
+ " Option groups (only set one per group):\n" +
+ strings.Join(radioStrings, "\n")
+}
+
// reportHelp builds the usage text for report command c. cum adds the
// -cum sorting option; redirect adds the >f output-redirection note.
func reportHelp(c string, cum, redirect bool) string {
	synopsis := c + " [n] [focus_regex]* [-ignore_regex]*"
	notes := []string{
		"Include up to n samples",
		"Include samples matching focus_regex, and exclude ignore_regex.",
	}
	if cum {
		synopsis += " [-cum]"
		notes = append(notes, "-cum sorts the output by cumulative weight")
	}
	if redirect {
		synopsis += " >f"
		notes = append(notes, "Optionally save the report on the file f")
	}
	return strings.Join(append([]string{synopsis}, notes...), "\n")
}
+
// listHelp builds the usage text for list-style command c; redirect
// adds the >f output-redirection note. Note the synopsis deliberately
// concatenates c directly with the argument placeholder, as the
// original did.
func listHelp(c string, redirect bool) string {
	synopsis := c + "<func_regex|address> [-focus_regex]* [-ignore_regex]*"
	if redirect {
		synopsis += " >f"
	}
	lines := []string{
		synopsis,
		"Include functions matching func_regex, or including the address specified.",
		"Include samples matching focus_regex, and exclude ignore_regex.",
	}
	if redirect {
		lines = append(lines, "Optionally save the report on the file f")
	}
	return strings.Join(lines, "\n")
}
+
// browsers returns a list of commands to attempt for web visualization.
// The $BROWSER environment variable, when set, takes highest priority,
// followed by an OS-specific default opener.
func browsers() []string {
	var cmds []string
	if userBrowser := os.Getenv("BROWSER"); userBrowser != "" {
		cmds = append(cmds, userBrowser)
	}
	switch runtime.GOOS {
	case "darwin":
		cmds = append(cmds, "/usr/bin/open")
	case "windows":
		cmds = append(cmds, "cmd /c start")
	default:
		// Commands opening browsers are prioritized over xdg-open, so browser()
		// command can be used on linux to open the .svg file generated by the -web
		// command (the .svg file includes embedded javascript so is best viewed in
		// a browser).
		cmds = append(cmds, []string{"chrome", "google-chrome", "chromium", "firefox", "sensible-browser"}...)
		if os.Getenv("DISPLAY") != "" {
			// xdg-open is only for use in a desktop environment.
			cmds = append(cmds, "xdg-open")
		}
	}
	return cmds
}
+
// kcachegrind is the visualizer command list used for callgrind output.
var kcachegrind = []string{"kcachegrind"}

// awayFromTTY saves the output in a file if it would otherwise go to
// the terminal screen. This is used to avoid dumping binary data on
// the screen.
func awayFromTTY(format string) PostProcessor {
	return func(input io.Reader, output io.Writer, ui plugin.UI) error {
		// Redirect to a temp file only when writing to stdout on an
		// interactive terminal (or in the interactive shell).
		if output == os.Stdout && (ui.IsTerminal() || interactiveMode) {
			tempFile, err := newTempFile("", "profile", "."+format)
			if err != nil {
				return err
			}
			ui.PrintErr("Generating report in ", tempFile.Name())
			output = tempFile
		}
		_, err := io.Copy(output, input)
		return err
	}
}
+
+func invokeDot(format string) PostProcessor {
+ return func(input io.Reader, output io.Writer, ui plugin.UI) error {
+ cmd := exec.Command("dot", "-T"+format)
+ cmd.Stdin, cmd.Stdout, cmd.Stderr = input, output, os.Stderr
+ if err := cmd.Run(); err != nil {
+ return fmt.Errorf("failed to execute dot. Is Graphviz installed? Error: %v", err)
+ }
+ return nil
+ }
+}
+
// massageDotSVG invokes the dot tool to generate an SVG image and alters
// the image to have panning capabilities when viewed in a browser.
func massageDotSVG() PostProcessor {
	generateSVG := invokeDot("svg")
	return func(input io.Reader, output io.Writer, ui plugin.UI) error {
		// Render the SVG into a buffer first so massageSVG can rewrite
		// it before anything is written to output.
		baseSVG := new(bytes.Buffer)
		if err := generateSVG(input, baseSVG, ui); err != nil {
			return err
		}
		_, err := output.Write([]byte(massageSVG(baseSVG.String())))
		return err
	}
}
+
// invokeVisualizer returns a PostProcessor that copies its input to a
// temporary file with the given suffix and launches the first of the
// given visualizer commands that starts successfully.
func invokeVisualizer(suffix string, visualizers []string) PostProcessor {
	return func(input io.Reader, output io.Writer, ui plugin.UI) error {
		tempFile, err := newTempFile(os.TempDir(), "pprof", "."+suffix)
		if err != nil {
			return err
		}
		deferDeleteTempFile(tempFile.Name())
		if _, err := io.Copy(tempFile, input); err != nil {
			return err
		}
		tempFile.Close()
		// Try visualizers until one is successful
		for _, v := range visualizers {
			// Separate command and arguments for exec.Command.
			args := strings.Split(v, " ")
			if len(args) == 0 {
				continue
			}
			viewer := exec.Command(args[0], append(args[1:], tempFile.Name())...)
			viewer.Stderr = os.Stderr
			if err = viewer.Start(); err == nil {
				// Wait for a second so that the visualizer has a chance to
				// open the input file. This needs to be done even if we're
				// waiting for the visualizer as it can be just a wrapper that
				// spawns a browser tab and returns right away.
				defer func(t <-chan time.Time) {
					<-t
				}(time.After(time.Second))
				// On interactive mode, let the visualizer run in the background
				// so other commands can be issued.
				if !interactiveMode {
					return viewer.Wait()
				}
				return nil
			}
		}
		// err here is the last Start failure, or nil if the visualizer
		// list was empty.
		return err
	}
}
+
// stringToBool is a custom parser for bools. We avoid using strconv.ParseBool
// to remain compatible with old pprof behavior (e.g., treating "" as true).
func stringToBool(s string) (bool, error) {
	values := map[string]bool{
		"true": true, "t": true, "yes": true, "y": true, "1": true, "": true,
		"false": false, "f": false, "no": false, "n": false, "0": false,
	}
	v, ok := values[strings.ToLower(s)]
	if !ok {
		return false, fmt.Errorf(`illegal value "%s" for bool variable`, s)
	}
	return v, nil
}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/driver/config.go b/src/cmd/vendor/github.com/google/pprof/internal/driver/config.go
new file mode 100644
index 0000000..b3f82f2
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/driver/config.go
@@ -0,0 +1,367 @@
+package driver
+
+import (
+ "fmt"
+ "net/url"
+ "reflect"
+ "strconv"
+ "strings"
+ "sync"
+)
+
// config holds settings for a single named config.
// The JSON tag name for a field is used both for JSON encoding and as
// a named variable. Fields tagged `json:"-"` are not saved in settings;
// the ones that remain configurable are listed in notSaved in init().
type config struct {
	// Filename for file-based output formats, stdout by default.
	Output string `json:"-"`

	// Display options.
	CallTree            bool    `json:"call_tree,omitempty"`
	RelativePercentages bool    `json:"relative_percentages,omitempty"`
	Unit                string  `json:"unit,omitempty"`
	CompactLabels       bool    `json:"compact_labels,omitempty"`
	SourcePath          string  `json:"-"`
	TrimPath            string  `json:"-"`
	IntelSyntax         bool    `json:"intel_syntax,omitempty"`
	Mean                bool    `json:"mean,omitempty"`
	SampleIndex         string  `json:"-"`
	DivideBy            float64 `json:"-"`
	Normalize           bool    `json:"normalize,omitempty"`
	Sort                string  `json:"sort,omitempty"`

	// Filtering options
	DropNegative bool    `json:"drop_negative,omitempty"`
	NodeCount    int     `json:"nodecount,omitempty"`
	NodeFraction float64 `json:"nodefraction,omitempty"`
	EdgeFraction float64 `json:"edgefraction,omitempty"`
	Trim         bool    `json:"trim,omitempty"`
	Focus        string  `json:"focus,omitempty"`
	Ignore       string  `json:"ignore,omitempty"`
	PruneFrom    string  `json:"prune_from,omitempty"`
	Hide         string  `json:"hide,omitempty"`
	Show         string  `json:"show,omitempty"`
	ShowFrom     string  `json:"show_from,omitempty"`
	TagFocus     string  `json:"tagfocus,omitempty"`
	TagIgnore    string  `json:"tagignore,omitempty"`
	TagShow      string  `json:"tagshow,omitempty"`
	TagHide      string  `json:"taghide,omitempty"`
	NoInlines    bool    `json:"noinlines,omitempty"`

	// Output granularity: one of "functions", "filefunctions", "files",
	// "lines", "addresses" (see the choices map in init()).
	Granularity string `json:"granularity,omitempty"`
}
+
// defaultConfig returns the default configuration values; it is unaffected by
// flags and interactive assignments.
func defaultConfig() config {
	return config{
		Unit:         "minimum", // NOTE(review): presumably auto-selects the smallest unit; confirm in measurement package
		NodeCount:    -1,        // -1 means "unset"; resolved per command in applyCommandOverrides
		NodeFraction: 0.005,
		EdgeFraction: 0.001,
		Trim:         true,
		DivideBy:     1.0,
		Sort:         "flat",
		Granularity:  "functions",
	}
}
+
// currentCfg holds the current configuration values; it is affected by
// flags and interactive assignments. All access must go through
// currentConfig/setCurrentConfig so that currentMu guards it.
var currentCfg = defaultConfig()
var currentMu sync.Mutex

// currentConfig returns a snapshot of the current configuration. config is
// a value type, so the caller cannot mutate shared state through the copy.
func currentConfig() config {
	currentMu.Lock()
	defer currentMu.Unlock()
	return currentCfg
}

// setCurrentConfig replaces the current configuration wholesale.
func setCurrentConfig(cfg config) {
	currentMu.Lock()
	defer currentMu.Unlock()
	currentCfg = cfg
}
+
// configField contains metadata for a single configuration field.
type configField struct {
	name         string              // JSON field name/key in variables
	urlparam     string              // URL parameter name
	saved        bool                // Is field saved in settings?
	field        reflect.StructField // Field in config
	choices      []string            // Allowed values for a multi-choice field (nil otherwise)
	defaultValue string              // Default value for this field.
}

var (
	configFields []configField // Precomputed metadata per config field

	// configFieldMap holds an entry for every config field as well as an
	// entry for every valid choice for a multi-choice field.
	configFieldMap map[string]configField
)
+
+func init() {
+ // Config names for fields that are not saved in settings and therefore
+ // do not have a JSON name.
+ notSaved := map[string]string{
+ // Not saved in settings, but present in URLs.
+ "SampleIndex": "sample_index",
+
+ // Following fields are also not placed in URLs.
+ "Output": "output",
+ "SourcePath": "source_path",
+ "TrimPath": "trim_path",
+ "DivideBy": "divide_by",
+ }
+
+ // choices holds the list of allowed values for config fields that can
+ // take on one of a bounded set of values.
+ choices := map[string][]string{
+ "sort": {"cum", "flat"},
+ "granularity": {"functions", "filefunctions", "files", "lines", "addresses"},
+ }
+
+ // urlparam holds the mapping from a config field name to the URL
+ // parameter used to hold that config field. If no entry is present for
+ // a name, the corresponding field is not saved in URLs.
+ urlparam := map[string]string{
+ "drop_negative": "dropneg",
+ "call_tree": "calltree",
+ "relative_percentages": "rel",
+ "unit": "unit",
+ "compact_labels": "compact",
+ "intel_syntax": "intel",
+ "nodecount": "n",
+ "nodefraction": "nf",
+ "edgefraction": "ef",
+ "trim": "trim",
+ "focus": "f",
+ "ignore": "i",
+ "prune_from": "prunefrom",
+ "hide": "h",
+ "show": "s",
+ "show_from": "sf",
+ "tagfocus": "tf",
+ "tagignore": "ti",
+ "tagshow": "ts",
+ "taghide": "th",
+ "mean": "mean",
+ "sample_index": "si",
+ "normalize": "norm",
+ "sort": "sort",
+ "granularity": "g",
+ "noinlines": "noinlines",
+ }
+
+ def := defaultConfig()
+ configFieldMap = map[string]configField{}
+ t := reflect.TypeOf(config{})
+ for i, n := 0, t.NumField(); i < n; i++ {
+ field := t.Field(i)
+ js := strings.Split(field.Tag.Get("json"), ",")
+ if len(js) == 0 {
+ continue
+ }
+ // Get the configuration name for this field.
+ name := js[0]
+ if name == "-" {
+ name = notSaved[field.Name]
+ if name == "" {
+ // Not a configurable field.
+ continue
+ }
+ }
+ f := configField{
+ name: name,
+ urlparam: urlparam[name],
+ saved: (name == js[0]),
+ field: field,
+ choices: choices[name],
+ }
+ f.defaultValue = def.get(f)
+ configFields = append(configFields, f)
+ configFieldMap[f.name] = f
+ for _, choice := range f.choices {
+ configFieldMap[choice] = f
+ }
+ }
+}
+
+// fieldPtr returns a pointer to the field identified by f in *cfg.
+func (cfg *config) fieldPtr(f configField) interface{} {
+ // reflect.ValueOf: converts to reflect.Value
+ // Elem: dereferences cfg to make *cfg
+ // FieldByIndex: fetches the field
+ // Addr: takes address of field
+ // Interface: converts back from reflect.Value to a regular value
+ return reflect.ValueOf(cfg).Elem().FieldByIndex(f.field.Index).Addr().Interface()
+}
+
+// get returns the value of field f in cfg.
+func (cfg *config) get(f configField) string {
+ switch ptr := cfg.fieldPtr(f).(type) {
+ case *string:
+ return *ptr
+ case *int:
+ return fmt.Sprint(*ptr)
+ case *float64:
+ return fmt.Sprint(*ptr)
+ case *bool:
+ return fmt.Sprint(*ptr)
+ }
+ panic(fmt.Sprintf("unsupported config field type %v", f.field.Type))
+}
+
// set sets the value of field f in cfg to value. The string is parsed
// according to the field's Go type; for a multi-choice string field the
// value must additionally be one of the allowed choices.
func (cfg *config) set(f configField, value string) error {
	switch ptr := cfg.fieldPtr(f).(type) {
	case *string:
		if len(f.choices) > 0 {
			// Verify that value is one of the allowed choices.
			for _, choice := range f.choices {
				if choice == value {
					*ptr = value
					return nil
				}
			}
			return fmt.Errorf("invalid %q value %q", f.name, value)
		}
		*ptr = value
	case *int:
		v, err := strconv.Atoi(value)
		if err != nil {
			return err
		}
		*ptr = v
	case *float64:
		v, err := strconv.ParseFloat(value, 64)
		if err != nil {
			return err
		}
		*ptr = v
	case *bool:
		// Uses the legacy pprof parser ("" counts as true).
		v, err := stringToBool(value)
		if err != nil {
			return err
		}
		*ptr = v
	default:
		panic(fmt.Sprintf("unsupported config field type %v", f.field.Type))
	}
	return nil
}
+
+// isConfigurable returns true if name is either the name of a config field, or
+// a valid value for a multi-choice config field.
+func isConfigurable(name string) bool {
+ _, ok := configFieldMap[name]
+ return ok
+}
+
+// isBoolConfig returns true if name is either name of a boolean config field,
+// or a valid value for a multi-choice config field.
+func isBoolConfig(name string) bool {
+ f, ok := configFieldMap[name]
+ if !ok {
+ return false
+ }
+ if name != f.name {
+ return true // name must be one possible value for the field
+ }
+ var cfg config
+ _, ok = cfg.fieldPtr(f).(*bool)
+ return ok
+}
+
+// completeConfig returns the list of configurable names starting with prefix.
+func completeConfig(prefix string) []string {
+ var result []string
+ for v := range configFieldMap {
+ if strings.HasPrefix(v, prefix) {
+ result = append(result, v)
+ }
+ }
+ return result
+}
+
// configure stores the name=value mapping into the current config, correctly
// handling the case when name identifies a particular choice in a field.
func configure(name, value string) error {
	currentMu.Lock()
	defer currentMu.Unlock()
	f, ok := configFieldMap[name]
	if !ok {
		return fmt.Errorf("unknown config field %q", name)
	}
	if f.name == name {
		// name refers to the field itself; parse value normally.
		return currentCfg.set(f, value)
	}
	// name must be one of the choices. If value is true, set field-value
	// to name.
	if v, err := strconv.ParseBool(value); v && err == nil {
		return currentCfg.set(f, name)
	}
	// NOTE(review): a false or unparsable value for a known choice also
	// lands here, so this message can be misleading in that case.
	return fmt.Errorf("unknown config field %q", name)
}
+
// resetTransient sets all transient fields in *cfg to their currently
// configured values. Transient fields are the ones tagged `json:"-"`:
// they are never persisted in settings, so a config loaded from settings
// inherits them from the live configuration instead.
func (cfg *config) resetTransient() {
	current := currentConfig()
	cfg.Output = current.Output
	cfg.SourcePath = current.SourcePath
	cfg.TrimPath = current.TrimPath
	cfg.DivideBy = current.DivideBy
	cfg.SampleIndex = current.SampleIndex
}
+
+// applyURL updates *cfg based on params.
+func (cfg *config) applyURL(params url.Values) error {
+ for _, f := range configFields {
+ var value string
+ if f.urlparam != "" {
+ value = params.Get(f.urlparam)
+ }
+ if value == "" {
+ continue
+ }
+ if err := cfg.set(f, value); err != nil {
+ return fmt.Errorf("error setting config field %s: %v", f.name, err)
+ }
+ }
+ return nil
+}
+
// makeURL returns a URL based on initialURL that contains the config contents
// as parameters. The second result is true iff a parameter value was changed.
func (cfg *config) makeURL(initialURL url.URL) (url.URL, bool) {
	q := initialURL.Query()
	changed := false
	for _, f := range configFields {
		// Only saved fields with a URL parameter name appear in URLs.
		if f.urlparam == "" || !f.saved {
			continue
		}
		v := cfg.get(f)
		if v == f.defaultValue {
			v = "" // URL form of default value is the empty string.
		} else if f.field.Type.Kind() == reflect.Bool {
			// Shorten bool values to "f" or "t"
			v = v[:1]
		}
		if q.Get(f.urlparam) == v {
			// Parameter already carries the desired value.
			continue
		}
		changed = true
		if v == "" {
			q.Del(f.urlparam)
		} else {
			q.Set(f.urlparam, v)
		}
	}
	if changed {
		initialURL.RawQuery = q.Encode()
	}
	return initialURL, changed
}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/driver/driver.go b/src/cmd/vendor/github.com/google/pprof/internal/driver/driver.go
new file mode 100644
index 0000000..878f2e1
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/driver/driver.go
@@ -0,0 +1,340 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package driver implements the core pprof functionality. It can be
+// parameterized with a flag implementation, fetch and symbolize
+// mechanisms.
+package driver
+
+import (
+ "bytes"
+ "fmt"
+ "os"
+ "path/filepath"
+ "regexp"
+ "strings"
+
+ "github.com/google/pprof/internal/plugin"
+ "github.com/google/pprof/internal/report"
+ "github.com/google/pprof/profile"
+)
+
// PProf acquires a profile, and symbolizes it using a profile
// manager. Then it generates a report formatted according to the
// options selected through the flags package.
func PProf(eo *plugin.Options) error {
	// Remove any temporary files created during pprof processing.
	defer cleanupTempFiles()

	o := setDefaults(eo)

	src, cmd, err := parseFlags(o)
	if err != nil {
		return err
	}

	p, err := fetchProfiles(src, o)
	if err != nil {
		return err
	}

	// A command was given on the command line: produce one report and exit.
	if cmd != nil {
		return generateReport(p, cmd, currentConfig(), o)
	}

	// Otherwise serve the web UI when a host:port was requested, or drop
	// into the interactive shell.
	if src.HTTPHostport != "" {
		return serveWebInterface(src.HTTPHostport, p, o, src.HTTPDisableBrowser)
	}
	return interactive(p, o)
}
+
// generateRawReport builds a report.Report for the command cmd (command
// name plus optional regexp argument) under configuration cfg, applying
// command-specific overrides, sample focusing, and aggregation. It returns
// the matched command descriptor (for its output format and post-processing
// hooks) together with the report. The incoming profile is copied first and
// is never modified.
func generateRawReport(p *profile.Profile, cmd []string, cfg config, o *plugin.Options) (*command, *report.Report, error) {
	p = p.Copy() // Prevent modification to the incoming profile.

	// Identify units of numeric tags in profile.
	numLabelUnits := identifyNumLabelUnits(p, o.UI)

	// Get report output format
	c := pprofCommands[cmd[0]]
	if c == nil {
		panic("unexpected nil command")
	}

	cfg = applyCommandOverrides(cmd[0], c.format, cfg)

	// Delay focus after configuring report to get percentages on all samples.
	relative := cfg.RelativePercentages
	if relative {
		if err := applyFocus(p, numLabelUnits, cfg, o.UI); err != nil {
			return nil, nil, err
		}
	}
	ropt, err := reportOptions(p, numLabelUnits, cfg)
	if err != nil {
		return nil, nil, err
	}
	ropt.OutputFormat = c.format
	if len(cmd) == 2 {
		// An optional second element is compiled as the symbol-selection
		// regexp for the report.
		s, err := regexp.Compile(cmd[1])
		if err != nil {
			return nil, nil, fmt.Errorf("parsing argument regexp %s: %v", cmd[1], err)
		}
		ropt.Symbol = s
	}

	rpt := report.New(p, ropt)
	if !relative {
		// Non-relative mode: focus only after the report captured totals.
		if err := applyFocus(p, numLabelUnits, cfg, o.UI); err != nil {
			return nil, nil, err
		}
	}
	if err := aggregate(p, cfg); err != nil {
		return nil, nil, err
	}

	return c, rpt, nil
}
+
// generateReport produces the report for cmd and delivers it: through the
// command's post-processor if any, then to the file named by cfg.Output,
// or — when no output file is configured — to the command's visualizer or
// plain stdout.
func generateReport(p *profile.Profile, cmd []string, cfg config, o *plugin.Options) error {
	c, rpt, err := generateRawReport(p, cmd, cfg, o)
	if err != nil {
		return err
	}

	// Generate the report.
	dst := new(bytes.Buffer)
	if err := report.Generate(dst, rpt, o.Obj); err != nil {
		return err
	}
	src := dst

	// If necessary, perform any data post-processing.
	if c.postProcess != nil {
		dst = new(bytes.Buffer)
		if err := c.postProcess(src, dst, o.UI); err != nil {
			return err
		}
		src = dst
	}

	// If no output is specified, use default visualizer.
	output := cfg.Output
	if output == "" {
		if c.visualizer != nil {
			return c.visualizer(src, os.Stdout, o.UI)
		}
		_, err := src.WriteTo(os.Stdout)
		return err
	}

	// Output to specified file.
	o.UI.PrintErr("Generating report in ", output)
	out, err := o.Writer.Open(output)
	if err != nil {
		return err
	}
	if _, err := src.WriteTo(out); err != nil {
		out.Close()
		return err
	}
	return out.Close()
}
+
// applyCommandOverrides returns cfg adjusted for the report command cmd and
// its output format: some commands force a particular granularity or
// disable graph trimming, and an unset node count (-1) is resolved here.
func applyCommandOverrides(cmd string, outputFormat int, cfg config) config {
	// Some report types override the trim flag to false below. This is to make
	// sure the default heuristics of excluding insignificant nodes and edges
	// from the call graph do not apply. One example where it is important is
	// annotated source or disassembly listing. Those reports run on a specific
	// function (or functions), but the trimming is applied before the function
	// data is selected. So, with trimming enabled, the report could end up
	// showing no data if the specified function is "uninteresting" as far as the
	// trimming is concerned.
	trim := cfg.Trim

	switch cmd {
	case "disasm", "weblist":
		trim = false
		cfg.Granularity = "addresses"
		// Force the 'noinlines' mode so that source locations for a given address
		// collapse and there is only one for the given address. Without this
		// cumulative metrics would be double-counted when annotating the assembly.
		// This is because the merge is done by address and in case of an inlined
		// stack each of the inlined entries is a separate callgraph node.
		cfg.NoInlines = true
	case "peek":
		trim = false
	case "list":
		trim = false
		cfg.Granularity = "lines"
		// Do not force 'noinlines' to be false so that specifying
		// "-list foo -noinlines" is supported and works as expected.
	case "text", "top", "topproto":
		if cfg.NodeCount == -1 {
			cfg.NodeCount = 0 // 0 disables the node limit (see !trim below)
		}
	default:
		if cfg.NodeCount == -1 {
			cfg.NodeCount = 80
		}
	}

	// Machine-readable formats always need full address-level detail.
	switch outputFormat {
	case report.Proto, report.Raw, report.Callgrind:
		trim = false
		cfg.Granularity = "addresses"
		cfg.NoInlines = false
	}

	if !trim {
		// Disable every trimming heuristic at once.
		cfg.NodeCount = 0
		cfg.NodeFraction = 0
		cfg.EdgeFraction = 0
	}
	return cfg
}
+
+func aggregate(prof *profile.Profile, cfg config) error {
+ var function, filename, linenumber, address bool
+ inlines := !cfg.NoInlines
+ switch cfg.Granularity {
+ case "addresses":
+ if inlines {
+ return nil
+ }
+ function = true
+ filename = true
+ linenumber = true
+ address = true
+ case "lines":
+ function = true
+ filename = true
+ linenumber = true
+ case "files":
+ filename = true
+ case "functions":
+ function = true
+ case "filefunctions":
+ function = true
+ filename = true
+ default:
+ return fmt.Errorf("unexpected granularity")
+ }
+ return prof.Aggregate(inlines, function, filename, linenumber, address)
+}
+
// reportOptions builds a report.Options from cfg for profile p, resolving
// the sample index/mean settings into value-extraction functions and
// collecting the active filter expressions so the report can display them.
func reportOptions(p *profile.Profile, numLabelUnits map[string]string, cfg config) (*report.Options, error) {
	si, mean := cfg.SampleIndex, cfg.Mean
	value, meanDiv, sample, err := sampleFormat(p, si, mean)
	if err != nil {
		return nil, err
	}

	stype := sample.Type
	if mean {
		stype = "mean_" + stype
	}

	// Guard the 1/cfg.DivideBy computation below.
	if cfg.DivideBy == 0 {
		return nil, fmt.Errorf("zero divisor specified")
	}

	var filters []string
	addFilter := func(k string, v string) {
		if v != "" {
			filters = append(filters, k+"="+v)
		}
	}
	addFilter("focus", cfg.Focus)
	addFilter("ignore", cfg.Ignore)
	addFilter("hide", cfg.Hide)
	addFilter("show", cfg.Show)
	addFilter("show_from", cfg.ShowFrom)
	addFilter("tagfocus", cfg.TagFocus)
	addFilter("tagignore", cfg.TagIgnore)
	addFilter("tagshow", cfg.TagShow)
	addFilter("taghide", cfg.TagHide)

	ropt := &report.Options{
		CumSort:      cfg.Sort == "cum",
		CallTree:     cfg.CallTree,
		DropNegative: cfg.DropNegative,

		CompactLabels: cfg.CompactLabels,
		Ratio:         1 / cfg.DivideBy,

		NodeCount:    cfg.NodeCount,
		NodeFraction: cfg.NodeFraction,
		EdgeFraction: cfg.EdgeFraction,

		ActiveFilters: filters,
		NumLabelUnits: numLabelUnits,

		SampleValue:       value,
		SampleMeanDivisor: meanDiv,
		SampleType:        stype,
		SampleUnit:        sample.Unit,

		OutputUnit: cfg.Unit,

		SourcePath: cfg.SourcePath,
		TrimPath:   cfg.TrimPath,

		IntelSyntax: cfg.IntelSyntax,
	}

	// Use the main binary's name as the report title when available.
	if len(p.Mapping) > 0 && p.Mapping[0].File != "" {
		ropt.Title = filepath.Base(p.Mapping[0].File)
	}

	return ropt, nil
}
+
+// identifyNumLabelUnits returns a map of numeric label keys to the units
+// associated with those keys.
+func identifyNumLabelUnits(p *profile.Profile, ui plugin.UI) map[string]string {
+ numLabelUnits, ignoredUnits := p.NumLabelUnits()
+
+ // Print errors for tags with multiple units associated with
+ // a single key.
+ for k, units := range ignoredUnits {
+ ui.PrintErr(fmt.Sprintf("For tag %s used unit %s, also encountered unit(s) %s", k, numLabelUnits[k], strings.Join(units, ", ")))
+ }
+ return numLabelUnits
+}
+
// sampleValueFunc extracts a single int64 value from a sample's value slice.
type sampleValueFunc func([]int64) int64

// sampleFormat returns a function to extract values out of a profile.Sample,
// and the type/units of those values.
func sampleFormat(p *profile.Profile, sampleIndex string, mean bool) (value, meanDiv sampleValueFunc, v *profile.ValueType, err error) {
	if len(p.SampleType) == 0 {
		return nil, nil, nil, fmt.Errorf("profile has no samples")
	}
	index, err := p.SampleIndexByName(sampleIndex)
	if err != nil {
		return nil, nil, nil, err
	}
	value = valueExtractor(index)
	if mean {
		// The mean divisor is the value at index 0 — presumably the sample
		// count by profile convention; TODO(review): confirm.
		meanDiv = valueExtractor(0)
	}
	v = p.SampleType[index]
	return
}
+
+func valueExtractor(ix int) sampleValueFunc {
+ return func(v []int64) int64 {
+ return v[ix]
+ }
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/driver/driver_focus.go b/src/cmd/vendor/github.com/google/pprof/internal/driver/driver_focus.go
new file mode 100644
index 0000000..fd05adb
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/driver/driver_focus.go
@@ -0,0 +1,219 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package driver
+
+import (
+ "fmt"
+ "regexp"
+ "strconv"
+ "strings"
+
+ "github.com/google/pprof/internal/measurement"
+ "github.com/google/pprof/internal/plugin"
+ "github.com/google/pprof/profile"
+)
+
// tagFilterRangeRx matches a signed integer with an optional alphabetic
// unit suffix (e.g. "32kb", "-4"); used by parseTagFilterRange.
var tagFilterRangeRx = regexp.MustCompile("([+-]?[[:digit:]]+)([[:alpha:]]+)?")

// applyFocus filters samples based on the focus/ignore options
// (focus, ignore, hide, show, show_from, tagfocus, tagignore, tagshow,
// taghide, prune_from), modifying prof in place and warning through ui
// whenever an option matched no samples.
func applyFocus(prof *profile.Profile, numLabelUnits map[string]string, cfg config, ui plugin.UI) error {
	// Each compile helper passes a non-nil err straight through, so one
	// check after the chain covers all of them.
	focus, err := compileRegexOption("focus", cfg.Focus, nil)
	ignore, err := compileRegexOption("ignore", cfg.Ignore, err)
	hide, err := compileRegexOption("hide", cfg.Hide, err)
	show, err := compileRegexOption("show", cfg.Show, err)
	showfrom, err := compileRegexOption("show_from", cfg.ShowFrom, err)
	tagfocus, err := compileTagFilter("tagfocus", cfg.TagFocus, numLabelUnits, ui, err)
	tagignore, err := compileTagFilter("tagignore", cfg.TagIgnore, numLabelUnits, ui, err)
	prunefrom, err := compileRegexOption("prune_from", cfg.PruneFrom, err)
	if err != nil {
		return err
	}

	fm, im, hm, hnm := prof.FilterSamplesByName(focus, ignore, hide, show)
	warnNoMatches(focus == nil || fm, "Focus", ui)
	warnNoMatches(ignore == nil || im, "Ignore", ui)
	warnNoMatches(hide == nil || hm, "Hide", ui)
	warnNoMatches(show == nil || hnm, "Show", ui)

	sfm := prof.ShowFrom(showfrom)
	warnNoMatches(showfrom == nil || sfm, "ShowFrom", ui)

	tfm, tim := prof.FilterSamplesByTag(tagfocus, tagignore)
	warnNoMatches(tagfocus == nil || tfm, "TagFocus", ui)
	warnNoMatches(tagignore == nil || tim, "TagIgnore", ui)

	tagshow, err := compileRegexOption("tagshow", cfg.TagShow, err)
	taghide, err := compileRegexOption("taghide", cfg.TagHide, err)
	tns, tnh := prof.FilterTagsByName(tagshow, taghide)
	warnNoMatches(tagshow == nil || tns, "TagShow", ui)
	warnNoMatches(taghide == nil || tnh, "TagHide", ui)

	if prunefrom != nil {
		prof.PruneFrom(prunefrom)
	}
	// err may have been set by the tagshow/taghide compilation above.
	return err
}
+
// compileRegexOption compiles value as the regexp for option name. An empty
// value yields a nil regexp; a non-nil incoming err is passed through
// unchanged so calls can be chained.
func compileRegexOption(name, value string, err error) (*regexp.Regexp, error) {
	if err != nil || value == "" {
		return nil, err
	}
	rx, compileErr := regexp.Compile(value)
	if compileErr != nil {
		return nil, fmt.Errorf("parsing %s regexp: %v", name, compileErr)
	}
	return rx, nil
}
+
// compileTagFilter returns a predicate over samples implementing the tag
// filter option named by name (tagfocus/tagignore), or nil if value is
// empty. A non-nil incoming err is passed through unchanged so calls can be
// chained. The filter may be restricted to one tag key via "key=value".
// When the value parses as a numeric range, it is matched against numeric
// labels (using numLabelUnits for unit conversion); otherwise it is treated
// as a comma-separated list of regexps matched against string labels.
func compileTagFilter(name, value string, numLabelUnits map[string]string, ui plugin.UI, err error) (func(*profile.Sample) bool, error) {
	if value == "" || err != nil {
		return nil, err
	}

	// Optional "key=value" prefix restricts matching to a single tag key.
	tagValuePair := strings.SplitN(value, "=", 2)
	var wantKey string
	if len(tagValuePair) == 2 {
		wantKey = tagValuePair[0]
		value = tagValuePair[1]
	}

	if numFilter := parseTagFilterRange(value); numFilter != nil {
		ui.PrintErr(name, ":Interpreted '", value, "' as range, not regexp")
		// labelFilter matches when any value in vals satisfies the range.
		labelFilter := func(vals []int64, unit string) bool {
			for _, val := range vals {
				if numFilter(val, unit) {
					return true
				}
			}
			return false
		}
		numLabelUnit := func(key string) string {
			return numLabelUnits[key]
		}
		if wantKey == "" {
			// Unkeyed: any numeric label may match.
			return func(s *profile.Sample) bool {
				for key, vals := range s.NumLabel {
					if labelFilter(vals, numLabelUnit(key)) {
						return true
					}
				}
				return false
			}, nil
		}
		// Keyed: match only against the requested key.
		return func(s *profile.Sample) bool {
			if vals, ok := s.NumLabel[wantKey]; ok {
				return labelFilter(vals, numLabelUnit(wantKey))
			}
			return false
		}, nil
	}

	var rfx []*regexp.Regexp
	for _, tagf := range strings.Split(value, ",") {
		fx, err := regexp.Compile(tagf)
		if err != nil {
			return nil, fmt.Errorf("parsing %s regexp: %v", name, err)
		}
		rfx = append(rfx, fx)
	}
	if wantKey == "" {
		// Unkeyed: every regexp must match some "key:value" pair.
		return func(s *profile.Sample) bool {
		matchedrx:
			for _, rx := range rfx {
				for key, vals := range s.Label {
					for _, val := range vals {
						// TODO: Match against val, not key:val in future
						if rx.MatchString(key + ":" + val) {
							continue matchedrx
						}
					}
				}
				return false
			}
			return true
		}, nil
	}
	// Keyed: any regexp matching any value of wantKey suffices.
	return func(s *profile.Sample) bool {
		if vals, ok := s.Label[wantKey]; ok {
			for _, rx := range rfx {
				for _, val := range vals {
					if rx.MatchString(val) {
						return true
					}
				}
			}
		}
		return false
	}, nil
}
+
// parseTagFilterRange returns a function to check if a value is
// contained on the range described by a string. It can recognize
// strings of the form:
// "32kb" -- matches values == 32kb
// ":64kb" -- matches values <= 64kb
// "4mb:" -- matches values >= 4mb
// "12kb:64mb" -- matches values between 12kb and 64mb (both included).
// It returns nil when the string does not describe a range.
func parseTagFilterRange(filter string) func(int64, string) bool {
	ranges := tagFilterRangeRx.FindAllStringSubmatch(filter, 2)
	if len(ranges) == 0 {
		return nil // No ranges were identified
	}
	v, err := strconv.ParseInt(ranges[0][1], 10, 64)
	if err != nil {
		// The regexp guarantees digits here, so this should only fire on
		// int64 overflow.
		panic(fmt.Errorf("failed to parse int %s: %v", ranges[0][1], err))
	}
	scaledValue, unit := measurement.Scale(v, ranges[0][2], ranges[0][2])
	if len(ranges) == 1 {
		// Single bound: the filter's shape relative to the matched text
		// selects ==, >= or <=.
		switch match := ranges[0][0]; filter {
		case match:
			return func(v int64, u string) bool {
				sv, su := measurement.Scale(v, u, unit)
				return su == unit && sv == scaledValue
			}
		case match + ":":
			return func(v int64, u string) bool {
				sv, su := measurement.Scale(v, u, unit)
				return su == unit && sv >= scaledValue
			}
		case ":" + match:
			return func(v int64, u string) bool {
				sv, su := measurement.Scale(v, u, unit)
				return su == unit && sv <= scaledValue
			}
		}
		return nil
	}
	// Two bounds must be exactly "low:high" with nothing else in between.
	if filter != ranges[0][0]+":"+ranges[1][0] {
		return nil
	}
	if v, err = strconv.ParseInt(ranges[1][1], 10, 64); err != nil {
		panic(fmt.Errorf("failed to parse int %s: %v", ranges[1][1], err))
	}
	scaledValue2, unit2 := measurement.Scale(v, ranges[1][2], unit)
	if unit != unit2 {
		// Bounds with incompatible units do not form a valid range.
		return nil
	}
	return func(v int64, u string) bool {
		sv, su := measurement.Scale(v, u, unit)
		return su == unit && sv >= scaledValue && sv <= scaledValue2
	}
}
+
+func warnNoMatches(match bool, option string, ui plugin.UI) {
+ if !match {
+ ui.PrintErr(option + " expression matched no samples")
+ }
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/driver/fetch.go b/src/cmd/vendor/github.com/google/pprof/internal/driver/fetch.go
new file mode 100644
index 0000000..b8a69e8
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/driver/fetch.go
@@ -0,0 +1,587 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package driver
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "io/ioutil"
+ "net/http"
+ "net/url"
+ "os"
+ "os/exec"
+ "path/filepath"
+ "runtime"
+ "strconv"
+ "strings"
+ "sync"
+ "time"
+
+ "github.com/google/pprof/internal/measurement"
+ "github.com/google/pprof/internal/plugin"
+ "github.com/google/pprof/profile"
+)
+
+// fetchProfiles fetches and symbolizes the profiles specified by s.
+// It will merge all the profiles it is able to retrieve, even if
+// there are some failures. It will return an error if it is unable to
+// fetch any profiles.
+func fetchProfiles(s *source, o *plugin.Options) (*profile.Profile, error) {
+	sources := make([]profileSource, 0, len(s.Sources))
+	for _, src := range s.Sources {
+		sources = append(sources, profileSource{
+			addr:   src,
+			source: s,
+		})
+	}
+
+	bases := make([]profileSource, 0, len(s.Base))
+	for _, src := range s.Base {
+		bases = append(bases, profileSource{
+			addr:   src,
+			source: s,
+		})
+	}
+
+	p, pbase, m, mbase, save, err := grabSourcesAndBases(sources, bases, o.Fetch, o.Obj, o.UI, o.HTTPTransport)
+	if err != nil {
+		return nil, err
+	}
+
+	if pbase != nil {
+		if s.DiffBase {
+			// Tag base samples so later reporting can distinguish them.
+			pbase.SetLabel("pprof::base", []string{"true"})
+		}
+		if s.Normalize {
+			err := p.Normalize(pbase)
+			if err != nil {
+				return nil, err
+			}
+		}
+		// Negate the base so that merging source + base yields the difference.
+		pbase.Scale(-1)
+		p, m, err = combineProfiles([]*profile.Profile{p, pbase}, []plugin.MappingSources{m, mbase})
+		if err != nil {
+			return nil, err
+		}
+	}
+
+	// Symbolize the merged profile.
+	if err := o.Sym.Symbolize(s.Symbolize, m, p); err != nil {
+		return nil, err
+	}
+	p.RemoveUninteresting()
+	unsourceMappings(p)
+
+	if s.Comment != "" {
+		p.Comments = append(p.Comments, s.Comment)
+	}
+
+	// Save a copy of the merged profile if there is at least one remote source.
+	if save {
+		dir, err := setTmpDir(o.UI)
+		if err != nil {
+			return nil, err
+		}
+
+		// File name encodes the binary name and the sample types, e.g.
+		// "pprof.binary.cpu.nanoseconds.pb.gz".
+		prefix := "pprof."
+		if len(p.Mapping) > 0 && p.Mapping[0].File != "" {
+			prefix += filepath.Base(p.Mapping[0].File) + "."
+		}
+		for _, s := range p.SampleType {
+			prefix += s.Type + "."
+		}
+
+		tempFile, err := newTempFile(dir, prefix, ".pb.gz")
+		if err == nil {
+			if err = p.Write(tempFile); err == nil {
+				o.UI.PrintErr("Saved profile in ", tempFile.Name())
+			}
+		}
+		// Saving is best-effort: failure is reported but does not abort the fetch.
+		if err != nil {
+			o.UI.PrintErr("Could not save profile: ", err)
+		}
+	}
+
+	if err := p.CheckValid(); err != nil {
+		return nil, err
+	}
+
+	return p, nil
+}
+
+// grabSourcesAndBases fetches the source and base profiles concurrently.
+// It returns the merged source profile, the merged base profile (nil if no
+// bases were requested), their mapping sources, and whether any profile came
+// from a remote source (and should therefore be saved locally).
+func grabSourcesAndBases(sources, bases []profileSource, fetch plugin.Fetcher, obj plugin.ObjTool, ui plugin.UI, tr http.RoundTripper) (*profile.Profile, *profile.Profile, plugin.MappingSources, plugin.MappingSources, bool, error) {
+	wg := sync.WaitGroup{}
+	wg.Add(2)
+	var psrc, pbase *profile.Profile
+	var msrc, mbase plugin.MappingSources
+	var savesrc, savebase bool
+	var errsrc, errbase error
+	var countsrc, countbase int
+	// Sources and bases are fetched in parallel; each goroutine writes only
+	// its own result variables, so no further synchronization is needed.
+	go func() {
+		defer wg.Done()
+		psrc, msrc, savesrc, countsrc, errsrc = chunkedGrab(sources, fetch, obj, ui, tr)
+	}()
+	go func() {
+		defer wg.Done()
+		pbase, mbase, savebase, countbase, errbase = chunkedGrab(bases, fetch, obj, ui, tr)
+	}()
+	wg.Wait()
+	save := savesrc || savebase
+
+	if errsrc != nil {
+		return nil, nil, nil, nil, false, fmt.Errorf("problem fetching source profiles: %v", errsrc)
+	}
+	if errbase != nil {
+		return nil, nil, nil, nil, false, fmt.Errorf("problem fetching base profiles: %v", errbase)
+	}
+	if countsrc == 0 {
+		return nil, nil, nil, nil, false, fmt.Errorf("failed to fetch any source profiles")
+	}
+	if countbase == 0 && len(bases) > 0 {
+		return nil, nil, nil, nil, false, fmt.Errorf("failed to fetch any base profiles")
+	}
+	// Partial success is tolerated but reported to the user.
+	if want, got := len(sources), countsrc; want != got {
+		ui.PrintErr(fmt.Sprintf("Fetched %d source profiles out of %d", got, want))
+	}
+	if want, got := len(bases), countbase; want != got {
+		ui.PrintErr(fmt.Sprintf("Fetched %d base profiles out of %d", got, want))
+	}
+
+	return psrc, pbase, msrc, mbase, save, nil
+}
+
+// chunkedGrab fetches the profiles described in source and merges them into
+// a single profile. It fetches a chunk of profiles concurrently, with a maximum
+// chunk size to limit its memory usage.
+func chunkedGrab(sources []profileSource, fetch plugin.Fetcher, obj plugin.ObjTool, ui plugin.UI, tr http.RoundTripper) (*profile.Profile, plugin.MappingSources, bool, int, error) {
+	// At most 64 profiles are held in memory at once before being merged.
+	const chunkSize = 64
+
+	var p *profile.Profile
+	var msrc plugin.MappingSources
+	var save bool
+	var count int
+
+	for start := 0; start < len(sources); start += chunkSize {
+		end := start + chunkSize
+		if end > len(sources) {
+			end = len(sources)
+		}
+		chunkP, chunkMsrc, chunkSave, chunkCount, chunkErr := concurrentGrab(sources[start:end], fetch, obj, ui, tr)
+		switch {
+		case chunkErr != nil:
+			return nil, nil, false, 0, chunkErr
+		case chunkP == nil:
+			// Every fetch in this chunk failed (already reported); keep going.
+			continue
+		case p == nil:
+			// First successful chunk seeds the running merge.
+			p, msrc, save, count = chunkP, chunkMsrc, chunkSave, chunkCount
+		default:
+			p, msrc, chunkErr = combineProfiles([]*profile.Profile{p, chunkP}, []plugin.MappingSources{msrc, chunkMsrc})
+			if chunkErr != nil {
+				return nil, nil, false, 0, chunkErr
+			}
+			if chunkSave {
+				save = true
+			}
+			count += chunkCount
+		}
+	}
+
+	return p, msrc, save, count, nil
+}
+
+// concurrentGrab fetches multiple profiles concurrently, merges the
+// successful ones, and returns the merged profile, its mapping sources,
+// whether any profile was remote, and the number of profiles merged.
+// Individual fetch failures are reported to ui but do not fail the call.
+func concurrentGrab(sources []profileSource, fetch plugin.Fetcher, obj plugin.ObjTool, ui plugin.UI, tr http.RoundTripper) (*profile.Profile, plugin.MappingSources, bool, int, error) {
+	wg := sync.WaitGroup{}
+	wg.Add(len(sources))
+	for i := range sources {
+		// Each goroutine writes only into its own element of sources.
+		go func(s *profileSource) {
+			defer wg.Done()
+			s.p, s.msrc, s.remote, s.err = grabProfile(s.source, s.addr, fetch, obj, ui, tr)
+		}(&sources[i])
+	}
+	wg.Wait()
+
+	var save bool
+	profiles := make([]*profile.Profile, 0, len(sources))
+	msrcs := make([]plugin.MappingSources, 0, len(sources))
+	for i := range sources {
+		s := &sources[i]
+		if err := s.err; err != nil {
+			ui.PrintErr(s.addr + ": " + err.Error())
+			continue
+		}
+		save = save || s.remote
+		profiles = append(profiles, s.p)
+		msrcs = append(msrcs, s.msrc)
+		// Zero the element to drop references to the fetched data so it
+		// can be reclaimed after merging.
+		*s = profileSource{}
+	}
+
+	if len(profiles) == 0 {
+		return nil, nil, false, 0, nil
+	}
+
+	p, msrc, err := combineProfiles(profiles, msrcs)
+	if err != nil {
+		return nil, nil, false, 0, err
+	}
+	return p, msrc, save, len(profiles), nil
+}
+
+// combineProfiles merges profiles into one and unions their mapping sources.
+func combineProfiles(profiles []*profile.Profile, msrcs []plugin.MappingSources) (*profile.Profile, plugin.MappingSources, error) {
+	// Merge profiles.
+	if err := measurement.ScaleProfiles(profiles); err != nil {
+		return nil, nil, err
+	}
+
+	p, err := profile.Merge(profiles)
+	if err != nil {
+		return nil, nil, err
+	}
+
+	// Combine mapping sources.
+	// Sources for the same mapping key from different profiles are concatenated.
+	msrc := make(plugin.MappingSources)
+	for _, ms := range msrcs {
+		for m, s := range ms {
+			msrc[m] = append(msrc[m], s...)
+		}
+	}
+	return p, msrc, nil
+}
+
+// profileSource pairs a single profile address with the overall source
+// configuration, and holds the per-source fetch results.
+type profileSource struct {
+	addr   string  // URL or file path to fetch from
+	source *source // shared fetch configuration
+
+	p      *profile.Profile       // fetched profile, set by grabProfile
+	msrc   plugin.MappingSources  // mapping sources for p
+	remote bool                   // true if fetched over the network
+	err    error                  // fetch error, if any
+}
+
+// homeEnv returns the name of the environment variable holding the user's
+// home directory on the current OS.
+func homeEnv() string {
+	switch runtime.GOOS {
+	case "windows":
+		return "USERPROFILE"
+	case "plan9":
+		return "home"
+	default:
+		return "HOME"
+	}
+}
+
+// setTmpDir prepares the directory to use to save profiles retrieved
+// remotely. It is selected from PPROF_TMPDIR, defaults to $HOME/pprof, and, if
+// $HOME is not set, falls back to os.TempDir().
+func setTmpDir(ui plugin.UI) (string, error) {
+	// Candidates are tried in priority order; the first one that can be
+	// created (or already exists) wins.
+	var dirs []string
+	if profileDir := os.Getenv("PPROF_TMPDIR"); profileDir != "" {
+		dirs = append(dirs, profileDir)
+	}
+	if homeDir := os.Getenv(homeEnv()); homeDir != "" {
+		dirs = append(dirs, filepath.Join(homeDir, "pprof"))
+	}
+	dirs = append(dirs, os.TempDir())
+	for _, tmpDir := range dirs {
+		if err := os.MkdirAll(tmpDir, 0755); err != nil {
+			ui.PrintErr("Could not use temp dir ", tmpDir, ": ", err.Error())
+			continue
+		}
+		return tmpDir, nil
+	}
+	return "", fmt.Errorf("failed to identify temp dir")
+}
+
+const testSourceAddress = "pproftest.local"
+
+// grabProfile fetches a profile. Returns the profile, sources for the
+// profile mappings, a bool indicating if the profile was fetched
+// remotely, and an error.
+func grabProfile(s *source, source string, fetcher plugin.Fetcher, obj plugin.ObjTool, ui plugin.UI, tr http.RoundTripper) (p *profile.Profile, msrc plugin.MappingSources, remote bool, err error) {
+	var src string
+	duration, timeout := time.Duration(s.Seconds)*time.Second, time.Duration(s.Timeout)*time.Second
+	// Try the custom fetcher first, if one is configured.
+	if fetcher != nil {
+		p, src, err = fetcher.Fetch(source, duration, timeout)
+		if err != nil {
+			return
+		}
+	}
+	// NOTE(review): err is always nil here (the error case returned above),
+	// so only the p == nil half of this condition can trigger the fallback.
+	if err != nil || p == nil {
+		// Fetch the profile over HTTP or from a file.
+		p, src, err = fetch(source, duration, timeout, ui, tr)
+		if err != nil {
+			return
+		}
+	}
+
+	if err = p.CheckValid(); err != nil {
+		return
+	}
+
+	// Update the binary locations from command line and paths.
+	locateBinaries(p, s, obj, ui)
+
+	// Collect the source URL for all mappings.
+	if src != "" {
+		msrc = collectMappingSources(p, src)
+		remote = true
+		if strings.HasPrefix(src, "http://"+testSourceAddress) {
+			// Treat test inputs as local to avoid saving
+			// testcase profiles during driver testing.
+			remote = false
+		}
+	}
+	return
+}
+
+// collectMappingSources saves the mapping sources of a profile.
+func collectMappingSources(p *profile.Profile, source string) plugin.MappingSources {
+	ms := plugin.MappingSources{}
+	for _, m := range p.Mapping {
+		src := struct {
+			Source string
+			Start  uint64
+		}{
+			source, m.Start,
+		}
+		// Key by build ID when available, otherwise by file name.
+		key := m.BuildID
+		if key == "" {
+			key = m.File
+		}
+		if key == "" {
+			// If there is no build id or source file, use the source as the
+			// mapping file. This will enable remote symbolization for this
+			// mapping, in particular for Go profiles on the legacy format.
+			// The source is reset back to empty string by unsourceMapping
+			// which is called after symbolization is finished.
+			m.File = source
+			key = source
+		}
+		ms[key] = append(ms[key], src)
+	}
+	return ms
+}
+
+// unsourceMappings iterates over the mappings in a profile and replaces file
+// set to the remote source URL by collectMappingSources back to empty string.
+func unsourceMappings(p *profile.Profile) {
+	for _, m := range p.Mapping {
+		if m.BuildID == "" {
+			// An absolute URL in File means collectMappingSources set it;
+			// real file paths parse without an absolute scheme.
+			if u, err := url.Parse(m.File); err == nil && u.IsAbs() {
+				m.File = ""
+			}
+		}
+	}
+}
+
+// locateBinaries searches for binary files listed in the profile and, if found,
+// updates the profile accordingly.
+func locateBinaries(p *profile.Profile, s *source, obj plugin.ObjTool, ui plugin.UI) {
+	// Construct search path to examine
+	searchPath := os.Getenv("PPROF_BINARY_PATH")
+	if searchPath == "" {
+		// Use $HOME/pprof/binaries as default directory for local symbolization binaries
+		searchPath = filepath.Join(os.Getenv(homeEnv()), "pprof", "binaries")
+	}
+mapping:
+	for _, m := range p.Mapping {
+		var baseName string
+		if m.File != "" {
+			baseName = filepath.Base(m.File)
+		}
+
+		for _, path := range filepath.SplitList(searchPath) {
+			// Build the list of candidate file names for this mapping,
+			// trying build-ID-based layouts first.
+			var fileNames []string
+			if m.BuildID != "" {
+				fileNames = []string{filepath.Join(path, m.BuildID, baseName)}
+				if matches, err := filepath.Glob(filepath.Join(path, m.BuildID, "*")); err == nil {
+					fileNames = append(fileNames, matches...)
+				}
+				fileNames = append(fileNames, filepath.Join(path, m.File, m.BuildID)) // perf path format
+			}
+			if m.File != "" {
+				// Try both the basename and the full path, to support the same directory
+				// structure as the perf symfs option.
+				if baseName != "" {
+					fileNames = append(fileNames, filepath.Join(path, baseName))
+				}
+				fileNames = append(fileNames, filepath.Join(path, m.File))
+			}
+			for _, name := range fileNames {
+				if f, err := obj.Open(name, m.Start, m.Limit, m.Offset); err == nil {
+					// NOTE(review): defer inside a loop — every opened file
+					// stays open until locateBinaries returns.
+					defer f.Close()
+					fileBuildID := f.BuildID()
+					if m.BuildID != "" && m.BuildID != fileBuildID {
+						ui.PrintErr("Ignoring local file " + name + ": build-id mismatch (" + m.BuildID + " != " + fileBuildID + ")")
+					} else {
+						m.File = name
+						continue mapping
+					}
+				}
+			}
+		}
+	}
+	if len(p.Mapping) == 0 {
+		// If there are no mappings, add a fake mapping to attempt symbolization.
+		// This is useful for some profiles generated by the golang runtime, which
+		// do not include any mappings. Symbolization with a fake mapping will only
+		// be successful against a non-PIE binary.
+		m := &profile.Mapping{ID: 1}
+		p.Mapping = []*profile.Mapping{m}
+		for _, l := range p.Location {
+			l.Mapping = m
+		}
+	}
+	// Replace executable filename/buildID with the overrides from source.
+	// Assumes the executable is the first Mapping entry.
+	if execName, buildID := s.ExecName, s.BuildID; execName != "" || buildID != "" {
+		m := p.Mapping[0]
+		if execName != "" {
+			m.File = execName
+		}
+		if buildID != "" {
+			m.BuildID = buildID
+		}
+	}
+}
+
+// fetch fetches a profile from source, within the timeout specified,
+// producing messages through the ui. It returns the profile and the
+// url of the actual source of the profile for remote profiles.
+func fetch(source string, duration, timeout time.Duration, ui plugin.UI, tr http.RoundTripper) (p *profile.Profile, src string, err error) {
+	var f io.ReadCloser
+
+	// Three cases: an HTTP(S) URL, a local perf.data file, or a local
+	// pprof-format file. src stays empty for local sources.
+	if sourceURL, timeout := adjustURL(source, duration, timeout); sourceURL != "" {
+		ui.Print("Fetching profile over HTTP from " + sourceURL)
+		if duration > 0 {
+			ui.Print(fmt.Sprintf("Please wait... (%v)", duration))
+		}
+		f, err = fetchURL(sourceURL, timeout, tr)
+		src = sourceURL
+	} else if isPerfFile(source) {
+		f, err = convertPerfData(source, ui)
+	} else {
+		f, err = os.Open(source)
+	}
+	if err == nil {
+		defer f.Close()
+		p, err = profile.Parse(f)
+	}
+	return
+}
+
+// fetchURL fetches a profile from a URL using HTTP.
+func fetchURL(source string, timeout time.Duration, tr http.RoundTripper) (io.ReadCloser, error) {
+	client := &http.Client{
+		Transport: tr,
+		// Allow 5s of slack beyond the profiling duration encoded in timeout.
+		Timeout: timeout + 5*time.Second,
+	}
+	resp, err := client.Get(source)
+	if err != nil {
+		return nil, fmt.Errorf("http fetch: %v", err)
+	}
+	if resp.StatusCode != http.StatusOK {
+		defer resp.Body.Close()
+		return nil, statusCodeError(resp)
+	}
+
+	// Caller is responsible for closing the returned body.
+	return resp.Body, nil
+}
+
+// statusCodeError converts a non-200 HTTP response into an error,
+// including the body text when the response comes from a pprof endpoint.
+func statusCodeError(resp *http.Response) error {
+	if resp.Header.Get("X-Go-Pprof") != "" && strings.Contains(resp.Header.Get("Content-Type"), "text/plain") {
+		// error is from pprof endpoint
+		if body, err := ioutil.ReadAll(resp.Body); err == nil {
+			return fmt.Errorf("server response: %s - %s", resp.Status, body)
+		}
+	}
+	return fmt.Errorf("server response: %s", resp.Status)
+}
+
+// isPerfFile checks if a file is in perf.data format. It also returns false
+// if it encounters an error during the check.
+func isPerfFile(path string) bool {
+	sourceFile, openErr := os.Open(path)
+	if openErr != nil {
+		return false
+	}
+	defer sourceFile.Close()
+
+	// If the file is the output of a perf record command, it should begin
+	// with the string PERFILE2.
+	perfHeader := []byte("PERFILE2")
+	actualHeader := make([]byte, len(perfHeader))
+	// Use io.ReadFull: a plain Read may legally return fewer bytes without
+	// an error, which would make the comparison below fail spuriously (or,
+	// worse, compare a partially filled buffer).
+	if _, readErr := io.ReadFull(sourceFile, actualHeader); readErr != nil {
+		return false
+	}
+	return bytes.Equal(actualHeader, perfHeader)
+}
+
+// convertPerfData converts the file at path which should be in perf.data format
+// using the perf_to_profile tool and returns the file containing the
+// profile.proto formatted data.
+func convertPerfData(perfPath string, ui plugin.UI) (*os.File, error) {
+	ui.Print(fmt.Sprintf(
+		"Converting %s to a profile.proto... (May take a few minutes)",
+		perfPath))
+	profile, err := newTempFile(os.TempDir(), "pprof_", ".pb.gz")
+	if err != nil {
+		return nil, err
+	}
+	// Schedule the converted file for cleanup when the driver exits.
+	deferDeleteTempFile(profile.Name())
+	// Requires the external perf_to_profile binary to be on PATH.
+	cmd := exec.Command("perf_to_profile", "-i", perfPath, "-o", profile.Name(), "-f")
+	cmd.Stdout, cmd.Stderr = os.Stdout, os.Stderr
+	if err := cmd.Run(); err != nil {
+		profile.Close()
+		return nil, fmt.Errorf("failed to convert perf.data file. Try github.com/google/perf_data_converter: %v", err)
+	}
+	return profile, nil
+}
+
+// adjustURL validates if a profile source is a URL and returns an
+// cleaned up URL and the timeout to use for retrieval over HTTP.
+// If the source cannot be recognized as a URL it returns an empty string.
+func adjustURL(source string, duration, timeout time.Duration) (string, time.Duration) {
+	u, err := url.Parse(source)
+	if err != nil || (u.Host == "" && u.Scheme != "" && u.Scheme != "file") {
+		// Try adding http:// to catch sources of the form hostname:port/path.
+		// url.Parse treats "hostname" as the scheme.
+		u, err = url.Parse("http://" + source)
+	}
+	if err != nil || u.Host == "" {
+		return "", 0
+	}
+
+	// Apply duration/timeout overrides to URL.
+	values := u.Query()
+	if duration > 0 {
+		// Explicit -seconds flag overrides any seconds= already in the URL.
+		values.Set("seconds", fmt.Sprint(int(duration.Seconds())))
+	} else {
+		// Otherwise adopt the URL's own seconds= for timeout computation.
+		if urlSeconds := values.Get("seconds"); urlSeconds != "" {
+			if us, err := strconv.ParseInt(urlSeconds, 10, 32); err == nil {
+				duration = time.Duration(us) * time.Second
+			}
+		}
+	}
+	if timeout <= 0 {
+		// Default timeout: 1.5x the profiling duration, or 60s if unknown.
+		if duration > 0 {
+			timeout = duration + duration/2
+		} else {
+			timeout = 60 * time.Second
+		}
+	}
+	u.RawQuery = values.Encode()
+	return u.String(), timeout
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/driver/flags.go b/src/cmd/vendor/github.com/google/pprof/internal/driver/flags.go
new file mode 100644
index 0000000..5390319
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/driver/flags.go
@@ -0,0 +1,71 @@
+// Copyright 2018 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package driver
+
+import (
+ "flag"
+ "strings"
+)
+
+// GoFlags implements the plugin.FlagSet interface.
+// It delegates to the standard library flag package's global flag set.
+type GoFlags struct {
+	UsageMsgs []string // extra usage strings registered via AddExtraUsage
+}
+
+// Bool implements the plugin.FlagSet interface.
+func (*GoFlags) Bool(o string, d bool, c string) *bool {
+	return flag.Bool(o, d, c)
+}
+
+// Int implements the plugin.FlagSet interface.
+func (*GoFlags) Int(o string, d int, c string) *int {
+	return flag.Int(o, d, c)
+}
+
+// Float64 implements the plugin.FlagSet interface.
+func (*GoFlags) Float64(o string, d float64, c string) *float64 {
+	return flag.Float64(o, d, c)
+}
+
+// String implements the plugin.FlagSet interface.
+func (*GoFlags) String(o, d, c string) *string {
+	return flag.String(o, d, c)
+}
+
+// StringList implements the plugin.FlagSet interface.
+// Note: the returned slice wraps a single flag, so the option can only be
+// supplied once on the command line with this implementation.
+func (*GoFlags) StringList(o, d, c string) *[]*string {
+	return &[]*string{flag.String(o, d, c)}
+}
+
+// ExtraUsage implements the plugin.FlagSet interface.
+func (f *GoFlags) ExtraUsage() string {
+	return strings.Join(f.UsageMsgs, "\n")
+}
+
+// AddExtraUsage implements the plugin.FlagSet interface.
+func (f *GoFlags) AddExtraUsage(eu string) {
+	f.UsageMsgs = append(f.UsageMsgs, eu)
+}
+
+// Parse implements the plugin.FlagSet interface.
+// It prints usage when no positional arguments remain after parsing.
+func (*GoFlags) Parse(usage func()) []string {
+	flag.Usage = usage
+	flag.Parse()
+	args := flag.Args()
+	if len(args) == 0 {
+		usage()
+	}
+	return args
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/driver/flamegraph.go b/src/cmd/vendor/github.com/google/pprof/internal/driver/flamegraph.go
new file mode 100644
index 0000000..fbeb765
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/driver/flamegraph.go
@@ -0,0 +1,106 @@
+// Copyright 2017 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package driver
+
+import (
+ "encoding/json"
+ "html/template"
+ "net/http"
+ "strings"
+
+ "github.com/google/pprof/internal/graph"
+ "github.com/google/pprof/internal/measurement"
+ "github.com/google/pprof/internal/report"
+)
+
+// treeNode is the JSON-serialized call-tree node consumed by the flame
+// graph front end. Single-letter JSON keys keep the payload small.
+type treeNode struct {
+	Name      string      `json:"n"`
+	FullName  string      `json:"f"`
+	Cum       int64       `json:"v"`
+	CumFormat string      `json:"l"`
+	Percent   string      `json:"p"`
+	Children  []*treeNode `json:"c"`
+}
+
+// flamegraph generates a web page containing a flamegraph.
+func (ui *webInterface) flamegraph(w http.ResponseWriter, req *http.Request) {
+	// Force the call tree so that the graph is a tree.
+	// Also do not trim the tree so that the flame graph contains all functions.
+	rpt, errList := ui.makeReport(w, req, []string{"svg"}, func(cfg *config) {
+		cfg.CallTree = true
+		cfg.Trim = false
+	})
+	if rpt == nil {
+		return // error already reported
+	}
+
+	// Generate dot graph.
+	g, config := report.GetDOT(rpt)
+	var nodes []*treeNode
+	nroots := 0
+	rootValue := int64(0)
+	nodeArr := []string{}
+	nodeMap := map[*graph.Node]*treeNode{}
+	// Make all nodes and the map, collect the roots.
+	for _, n := range g.Nodes {
+		v := n.CumValue()
+		fullName := n.Info.PrintableName()
+		node := &treeNode{
+			Name:      graph.ShortenFunctionName(fullName),
+			FullName:  fullName,
+			Cum:       v,
+			CumFormat: config.FormatValue(v),
+			Percent:   strings.TrimSpace(measurement.Percentage(v, config.Total)),
+		}
+		nodes = append(nodes, node)
+		if len(n.In) == 0 {
+			// A node with no incoming edges is a root: swap it into the
+			// root region at the front of nodes so roots end up contiguous
+			// in nodes[0:nroots].
+			nodes[nroots], nodes[len(nodes)-1] = nodes[len(nodes)-1], nodes[nroots]
+			nroots++
+			rootValue += v
+		}
+		nodeMap[n] = node
+		// Get all node names into an array.
+		nodeArr = append(nodeArr, n.Info.Name)
+	}
+	// Populate the child links.
+	for _, n := range g.Nodes {
+		node := nodeMap[n]
+		for child := range n.Out {
+			node.Children = append(node.Children, nodeMap[child])
+		}
+	}
+
+	// Synthesize a single root so the flame graph always has one base frame.
+	rootNode := &treeNode{
+		Name:      "root",
+		FullName:  "root",
+		Cum:       rootValue,
+		CumFormat: config.FormatValue(rootValue),
+		Percent:   strings.TrimSpace(measurement.Percentage(rootValue, config.Total)),
+		Children:  nodes[0:nroots],
+	}
+
+	// JSON marshalling flame graph
+	b, err := json.Marshal(rootNode)
+	if err != nil {
+		http.Error(w, "error serializing flame graph", http.StatusInternalServerError)
+		ui.options.UI.PrintErr(err)
+		return
+	}
+
+	ui.render(w, req, "flamegraph", rpt, errList, config.Labels, webArgs{
+		FlameGraph: template.JS(b),
+		Nodes:      nodeArr,
+	})
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/driver/interactive.go b/src/cmd/vendor/github.com/google/pprof/internal/driver/interactive.go
new file mode 100644
index 0000000..777fb90
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/driver/interactive.go
@@ -0,0 +1,418 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package driver
+
+import (
+ "fmt"
+ "io"
+ "regexp"
+ "sort"
+ "strconv"
+ "strings"
+
+ "github.com/google/pprof/internal/plugin"
+ "github.com/google/pprof/internal/report"
+ "github.com/google/pprof/profile"
+)
+
+var commentStart = "//:" // Sentinel for comments on options
+var tailDigitsRE = regexp.MustCompile("[0-9]+$") // Trailing digits of abbreviated commands, e.g. the "10" in "top10"
+
+// interactive starts a shell to read pprof commands.
+func interactive(p *profile.Profile, o *plugin.Options) error {
+	// Enter command processing loop.
+	o.UI.SetAutoComplete(newCompleter(functionNames(p)))
+	configure("compact_labels", "true")
+	configHelp["sample_index"] += fmt.Sprintf("Or use sample_index=name, with name in %v.\n", sampleTypes(p))
+
+	// Do not wait for the visualizer to complete, to allow multiple
+	// graphs to be visualized simultaneously.
+	interactiveMode = true
+	shortcuts := profileShortcuts(p)
+
+	greetings(p, o.UI)
+	for {
+		input, err := o.UI.ReadLine("(pprof) ")
+		if err != nil {
+			if err != io.EOF {
+				return err
+			}
+			// EOF with no pending input terminates the shell cleanly;
+			// EOF with input still processes that final line below.
+			if input == "" {
+				return nil
+			}
+		}
+
+		for _, input := range shortcuts.expand(input) {
+			// Process assignments of the form variable=value
+			if s := strings.SplitN(input, "=", 2); len(s) > 0 {
+				name := strings.TrimSpace(s[0])
+				var value string
+				if len(s) == 2 {
+					value = s[1]
+					// Strip a trailing "//:"-style option comment.
+					if comment := strings.LastIndex(value, commentStart); comment != -1 {
+						value = value[:comment]
+					}
+					value = strings.TrimSpace(value)
+				}
+				if isConfigurable(name) {
+					// All non-bool options require inputs
+					if len(s) == 1 && !isBoolConfig(name) {
+						o.UI.PrintErr(fmt.Errorf("please specify a value, e.g. %s=<val>", name))
+						continue
+					}
+					if name == "sample_index" {
+						// Error check sample_index=xxx to ensure xxx is a valid sample type.
+						index, err := p.SampleIndexByName(value)
+						if err != nil {
+							o.UI.PrintErr(err)
+							continue
+						}
+						if index < 0 || index >= len(p.SampleType) {
+							o.UI.PrintErr(fmt.Errorf("invalid sample_index %q", value))
+							continue
+						}
+						value = p.SampleType[index].Type
+					}
+					if err := configure(name, value); err != nil {
+						o.UI.PrintErr(err)
+					}
+					continue
+				}
+			}
+
+			tokens := strings.Fields(input)
+			if len(tokens) == 0 {
+				continue
+			}
+
+			// Built-in shell commands handled without generating a report.
+			switch tokens[0] {
+			case "o", "options":
+				printCurrentOptions(p, o.UI)
+				continue
+			case "exit", "quit", "q":
+				return nil
+			case "help":
+				commandHelp(strings.Join(tokens[1:], " "), o.UI)
+				continue
+			}
+
+			args, cfg, err := parseCommandLine(tokens)
+			if err == nil {
+				err = generateReportWrapper(p, args, cfg, o)
+			}
+
+			if err != nil {
+				o.UI.PrintErr(err)
+			}
+		}
+	}
+}
+
+// generateReportWrapper indirects report generation so tests can stub it out.
+var generateReportWrapper = generateReport // For testing purposes.
+
+// greetings prints a brief welcome and some overall profile
+// information before accepting interactive commands.
+func greetings(p *profile.Profile, ui plugin.UI) {
+	numLabelUnits := identifyNumLabelUnits(p, ui)
+	ropt, err := reportOptions(p, numLabelUnits, currentConfig())
+	if err == nil {
+		rpt := report.New(p, ropt)
+		ui.Print(strings.Join(report.ProfileLabels(rpt), "\n"))
+		if rpt.Total() == 0 && len(p.SampleType) > 1 {
+			ui.Print(`No samples were found with the default sample value type.`)
+			ui.Print(`Try "sample_index" command to analyze different sample values.`, "\n")
+		}
+	}
+	ui.Print(`Entering interactive mode (type "help" for commands, "o" for options)`)
+}
+
+// shortcuts represents composite commands that expand into a sequence
+// of other commands.
+type shortcuts map[string][]string
+
+// expand returns the command sequence for input if it is a shortcut,
+// or the (trimmed) input itself as a one-element sequence otherwise.
+func (a shortcuts) expand(input string) []string {
+	input = strings.TrimSpace(input)
+	if a != nil {
+		if r, ok := a[input]; ok {
+			return r
+		}
+	}
+	return []string{input}
+}
+
+// pprofShortcuts are the built-in shortcuts; ":" clears all filters.
+var pprofShortcuts = shortcuts{
+	":": []string{"focus=", "ignore=", "hide=", "tagfocus=", "tagignore="},
+}
+
+// profileShortcuts creates macros for convenience and backward compatibility.
+// NOTE(review): s aliases the package-level pprofShortcuts map (map assignment
+// copies the header, not the entries), so per-profile entries accumulate in
+// the shared map across calls.
+func profileShortcuts(p *profile.Profile) shortcuts {
+	s := pprofShortcuts
+	// Add shortcuts for sample types
+	for _, st := range p.SampleType {
+		command := fmt.Sprintf("sample_index=%s", st.Type)
+		s[st.Type] = []string{command}
+		s["total_"+st.Type] = []string{"mean=0", command}
+		s["mean_"+st.Type] = []string{"mean=1", command}
+	}
+	return s
+}
+
+// sampleTypes returns the type names of all sample values in p, in order.
+func sampleTypes(p *profile.Profile) []string {
+	types := make([]string, len(p.SampleType))
+	for i, t := range p.SampleType {
+		types[i] = t.Type
+	}
+	return types
+}
+
+// printCurrentOptions lists every configurable option with its current
+// value and an inline "//:" comment describing the accepted values.
+func printCurrentOptions(p *profile.Profile, ui plugin.UI) {
+	var args []string
+	current := currentConfig()
+	for _, f := range configFields {
+		n := f.name
+		v := current.get(f)
+		comment := ""
+		switch {
+		case len(f.choices) > 0:
+			values := append([]string{}, f.choices...)
+			sort.Strings(values)
+			comment = "[" + strings.Join(values, " | ") + "]"
+		case n == "sample_index":
+			st := sampleTypes(p)
+			if v == "" {
+				// Apply default (last sample index).
+				v = st[len(st)-1]
+			}
+			// Add comments for all sample types in profile.
+			comment = "[" + strings.Join(st, " | ") + "]"
+		case n == "source_path":
+			continue
+		case n == "nodecount" && v == "-1":
+			comment = "default"
+		case v == "":
+			// Add quotes for empty values.
+			v = `""`
+		}
+		if comment != "" {
+			comment = commentStart + " " + comment
+		}
+		args = append(args, fmt.Sprintf("  %-25s = %-20s %s", n, v, comment))
+	}
+	sort.Strings(args)
+	ui.Print(strings.Join(args, "\n"))
+}
+
+// parseCommandLine parses a command and returns the pprof command to
+// execute and the configuration to use for the report.
+func parseCommandLine(input []string) ([]string, config, error) {
+	cmd, args := input[:1], input[1:]
+	name := cmd[0]
+
+	c := pprofCommands[name]
+	if c == nil {
+		// Attempt splitting digits on abbreviated commands (eg top10)
+		if d := tailDigitsRE.FindString(name); d != "" && d != name {
+			name = name[:len(name)-len(d)]
+			cmd[0], args = name, append([]string{d}, args...)
+			c = pprofCommands[name]
+		}
+	}
+	if c == nil {
+		// Not a command; if it names an option, suggest assignment syntax.
+		if _, ok := configHelp[name]; ok {
+			value := "<val>"
+			if len(args) > 0 {
+				value = args[0]
+			}
+			return nil, config{}, fmt.Errorf("did you mean: %s=%s", name, value)
+		}
+		return nil, config{}, fmt.Errorf("unrecognized command: %q", name)
+	}
+
+	if c.hasParam {
+		if len(args) == 0 {
+			return nil, config{}, fmt.Errorf("command %s requires an argument", name)
+		}
+		cmd = append(cmd, args[0])
+		args = args[1:]
+	}
+
+	// Copy config since options set in the command line should not persist.
+	vcopy := currentConfig()
+
+	// Remaining args: a bare integer sets the node count, ">file" redirects
+	// output, "-regex" adds an ignore filter, anything else a focus filter.
+	var focus, ignore string
+	for i := 0; i < len(args); i++ {
+		t := args[i]
+		if n, err := strconv.ParseInt(t, 10, 32); err == nil {
+			vcopy.NodeCount = int(n)
+			continue
+		}
+		switch t[0] {
+		case '>':
+			outputFile := t[1:]
+			if outputFile == "" {
+				// Allow "> file" with a space: take the next token.
+				i++
+				if i >= len(args) {
+					return nil, config{}, fmt.Errorf("unexpected end of line after >")
+				}
+				outputFile = args[i]
+			}
+			vcopy.Output = outputFile
+		case '-':
+			if t == "--cum" || t == "-cum" {
+				vcopy.Sort = "cum"
+				continue
+			}
+			ignore = catRegex(ignore, t[1:])
+		default:
+			focus = catRegex(focus, t)
+		}
+	}
+
+	// For the "tags" command the filters apply to tags rather than nodes.
+	if name == "tags" {
+		if focus != "" {
+			vcopy.TagFocus = focus
+		}
+		if ignore != "" {
+			vcopy.TagIgnore = ignore
+		}
+	} else {
+		if focus != "" {
+			vcopy.Focus = focus
+		}
+		if ignore != "" {
+			vcopy.Ignore = ignore
+		}
+	}
+	if vcopy.NodeCount == -1 && (name == "text" || name == "top") {
+		vcopy.NodeCount = 10
+	}
+
+	return cmd, vcopy, nil
+}
+
+// catRegex joins two regexps with "|" (alternation); if either is empty
+// it returns the other unchanged.
+func catRegex(a, b string) string {
+	if a != "" && b != "" {
+		return a + "|" + b
+	}
+	return a + b
+}
+
+// commandHelp displays help and usage information for all Commands
+// and Variables or a specific Command or Variable.
+func commandHelp(args string, ui plugin.UI) {
+	if args == "" {
+		help := usage(false)
+		help = help + `
+  :   Clear focus/ignore/hide/tagfocus/tagignore
+
+  type "help <cmd|option>" for more information
+`
+
+		ui.Print(help)
+		return
+	}
+
+	if c := pprofCommands[args]; c != nil {
+		ui.Print(c.help(args))
+		return
+	}
+
+	if help, ok := configHelp[args]; ok {
+		ui.Print(help + "\n")
+		return
+	}
+
+	ui.PrintErr("Unknown command: " + args)
+}
+
+// newCompleter creates an autocompletion function for a set of commands.
+// fns is the list of function names from the profile, used to complete
+// focus/ignore arguments.
+func newCompleter(fns []string) func(string) string {
+	return func(line string) string {
+		switch tokens := strings.Fields(line); len(tokens) {
+		case 0:
+			// Nothing to complete
+		case 1:
+			// Single token -- complete command name
+			if match := matchVariableOrCommand(tokens[0]); match != "" {
+				return match
+			}
+		case 2:
+			if tokens[0] == "help" {
+				if match := matchVariableOrCommand(tokens[1]); match != "" {
+					return tokens[0] + " " + match
+				}
+				return line
+			}
+			fallthrough
+		default:
+			// Multiple tokens -- complete using functions, except for tags
+			if cmd := pprofCommands[tokens[0]]; cmd != nil && tokens[0] != "tags" {
+				lastTokenIdx := len(tokens) - 1
+				lastToken := tokens[lastTokenIdx]
+				// Preserve a leading "-" (ignore-filter syntax) while
+				// completing the regexp that follows it.
+				if strings.HasPrefix(lastToken, "-") {
+					lastToken = "-" + functionCompleter(lastToken[1:], fns)
+				} else {
+					lastToken = functionCompleter(lastToken, fns)
+				}
+				return strings.Join(append(tokens[:lastTokenIdx], lastToken), " ")
+			}
+		}
+		return line
+	}
+}
+
+// matchVariableOrCommand attempts to match a string token to the prefix of a Command.
+// It returns the unique match, or "" when there is no match or the prefix
+// is ambiguous.
+func matchVariableOrCommand(token string) string {
+	token = strings.ToLower(token)
+	var matches []string
+	for cmd := range pprofCommands {
+		if strings.HasPrefix(cmd, token) {
+			matches = append(matches, cmd)
+		}
+	}
+	matches = append(matches, completeConfig(token)...)
+	if len(matches) == 1 {
+		return matches[0]
+	}
+	return ""
+}
+
+// functionCompleter replaces provided substring with a function
+// name retrieved from a profile if a single match exists. Otherwise,
+// it returns unchanged substring. It defaults to no-op if the profile
+// is not specified.
+func functionCompleter(substring string, fns []string) string {
+	found := ""
+	for _, fName := range fns {
+		if strings.Contains(fName, substring) {
+			// A second match makes the substring ambiguous: bail out.
+			if found != "" {
+				return substring
+			}
+			found = fName
+		}
+	}
+	if found != "" {
+		return found
+	}
+	return substring
+}
+
+// functionNames collects the names of all functions in the profile.
+func functionNames(p *profile.Profile) []string {
+	var fns []string
+	for _, fn := range p.Function {
+		fns = append(fns, fn.Name)
+	}
+	return fns
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/driver/options.go b/src/cmd/vendor/github.com/google/pprof/internal/driver/options.go
new file mode 100644
index 0000000..6e8f9fc
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/driver/options.go
@@ -0,0 +1,100 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package driver
+
+import (
+ "bufio"
+ "fmt"
+ "io"
+ "os"
+ "strings"
+
+ "github.com/google/pprof/internal/binutils"
+ "github.com/google/pprof/internal/plugin"
+ "github.com/google/pprof/internal/symbolizer"
+ "github.com/google/pprof/internal/transport"
+)
+
// setDefaults returns a new plugin.Options with zero fields set to
// sensible defaults.
//
// The argument is never mutated: o is shallow-copied first and only nil
// fields of the copy are filled in. Initialization order matters for the
// last two fields: HTTPTransport is built from Flagset, and Sym is built
// from Obj, UI and HTTPTransport, so those must already be resolved.
func setDefaults(o *plugin.Options) *plugin.Options {
	d := &plugin.Options{}
	if o != nil {
		*d = *o
	}
	if d.Writer == nil {
		// Plain filesystem-backed writer (see oswriter below).
		d.Writer = oswriter{}
	}
	if d.Flagset == nil {
		d.Flagset = &GoFlags{}
	}
	if d.Obj == nil {
		d.Obj = &binutils.Binutils{}
	}
	if d.UI == nil {
		// Line-oriented UI reading from stdin (see stdUI below).
		d.UI = &stdUI{r: bufio.NewReader(os.Stdin)}
	}
	if d.HTTPTransport == nil {
		d.HTTPTransport = transport.New(d.Flagset)
	}
	if d.Sym == nil {
		d.Sym = &symbolizer.Symbolizer{Obj: d.Obj, UI: d.UI, Transport: d.HTTPTransport}
	}
	return d
}
+
+type stdUI struct {
+ r *bufio.Reader
+}
+
+func (ui *stdUI) ReadLine(prompt string) (string, error) {
+ os.Stdout.WriteString(prompt)
+ return ui.r.ReadString('\n')
+}
+
+func (ui *stdUI) Print(args ...interface{}) {
+ ui.fprint(os.Stderr, args)
+}
+
+func (ui *stdUI) PrintErr(args ...interface{}) {
+ ui.fprint(os.Stderr, args)
+}
+
+func (ui *stdUI) IsTerminal() bool {
+ return false
+}
+
+func (ui *stdUI) WantBrowser() bool {
+ return true
+}
+
+func (ui *stdUI) SetAutoComplete(func(string) string) {
+}
+
+func (ui *stdUI) fprint(f *os.File, args []interface{}) {
+ text := fmt.Sprint(args...)
+ if !strings.HasSuffix(text, "\n") {
+ text += "\n"
+ }
+ f.WriteString(text)
+}
+
+// oswriter implements the Writer interface using a regular file.
+type oswriter struct{}
+
+func (oswriter) Open(name string) (io.WriteCloser, error) {
+ f, err := os.Create(name)
+ return f, err
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/driver/settings.go b/src/cmd/vendor/github.com/google/pprof/internal/driver/settings.go
new file mode 100644
index 0000000..f72314b
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/driver/settings.go
@@ -0,0 +1,157 @@
+package driver
+
+import (
+ "encoding/json"
+ "fmt"
+ "io/ioutil"
+ "net/url"
+ "os"
+ "path/filepath"
+)
+
// settings holds pprof settings as persisted in the settings file
// (see settingsFileName).
type settings struct {
	// Configs holds a list of named UI configurations.
	Configs []namedConfig `json:"configs"`
}

// namedConfig associates a name with a config. The config struct is
// embedded without a JSON tag, so its fields are flattened into the
// same JSON object as Name when (de)serialized.
type namedConfig struct {
	Name string `json:"name"`
	config
}
+
+// settingsFileName returns the name of the file where settings should be saved.
+func settingsFileName() (string, error) {
+ // Return "pprof/settings.json" under os.UserConfigDir().
+ dir, err := os.UserConfigDir()
+ if err != nil {
+ return "", err
+ }
+ return filepath.Join(dir, "pprof", "settings.json"), nil
+}
+
+// readSettings reads settings from fname.
+func readSettings(fname string) (*settings, error) {
+ data, err := ioutil.ReadFile(fname)
+ if err != nil {
+ if os.IsNotExist(err) {
+ return &settings{}, nil
+ }
+ return nil, fmt.Errorf("could not read settings: %w", err)
+ }
+ settings := &settings{}
+ if err := json.Unmarshal(data, settings); err != nil {
+ return nil, fmt.Errorf("could not parse settings: %w", err)
+ }
+ for i := range settings.Configs {
+ settings.Configs[i].resetTransient()
+ }
+ return settings, nil
+}
+
+// writeSettings saves settings to fname.
+func writeSettings(fname string, settings *settings) error {
+ data, err := json.MarshalIndent(settings, "", " ")
+ if err != nil {
+ return fmt.Errorf("could not encode settings: %w", err)
+ }
+
+ // create the settings directory if it does not exist
+ // XDG specifies permissions 0700 when creating settings dirs:
+ // https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html
+ if err := os.MkdirAll(filepath.Dir(fname), 0700); err != nil {
+ return fmt.Errorf("failed to create settings directory: %w", err)
+ }
+
+ if err := ioutil.WriteFile(fname, data, 0644); err != nil {
+ return fmt.Errorf("failed to write settings: %w", err)
+ }
+ return nil
+}
+
// configMenuEntry holds information for a single config menu entry
// shown by the web UI.
type configMenuEntry struct {
	Name       string // Name of the config.
	URL        string // Page URL with this config applied (see configMenu).
	Current    bool   // Is this the currently selected config?
	UserConfig bool   // Is this a user-provided config?
}
+
+// configMenu returns a list of items to add to a menu in the web UI.
+func configMenu(fname string, url url.URL) []configMenuEntry {
+ // Start with system configs.
+ configs := []namedConfig{{Name: "Default", config: defaultConfig()}}
+ if settings, err := readSettings(fname); err == nil {
+ // Add user configs.
+ configs = append(configs, settings.Configs...)
+ }
+
+ // Convert to menu entries.
+ result := make([]configMenuEntry, len(configs))
+ lastMatch := -1
+ for i, cfg := range configs {
+ dst, changed := cfg.config.makeURL(url)
+ if !changed {
+ lastMatch = i
+ }
+ result[i] = configMenuEntry{
+ Name: cfg.Name,
+ URL: dst.String(),
+ UserConfig: (i != 0),
+ }
+ }
+ // Mark the last matching config as currennt
+ if lastMatch >= 0 {
+ result[lastMatch].Current = true
+ }
+ return result
+}
+
+// editSettings edits settings by applying fn to them.
+func editSettings(fname string, fn func(s *settings) error) error {
+ settings, err := readSettings(fname)
+ if err != nil {
+ return err
+ }
+ if err := fn(settings); err != nil {
+ return err
+ }
+ return writeSettings(fname, settings)
+}
+
+// setConfig saves the config specified in request to fname.
+func setConfig(fname string, request url.URL) error {
+ q := request.Query()
+ name := q.Get("config")
+ if name == "" {
+ return fmt.Errorf("invalid config name")
+ }
+ cfg := currentConfig()
+ if err := cfg.applyURL(q); err != nil {
+ return err
+ }
+ return editSettings(fname, func(s *settings) error {
+ for i, c := range s.Configs {
+ if c.Name == name {
+ s.Configs[i].config = cfg
+ return nil
+ }
+ }
+ s.Configs = append(s.Configs, namedConfig{Name: name, config: cfg})
+ return nil
+ })
+}
+
+// removeConfig removes config from fname.
+func removeConfig(fname, config string) error {
+ return editSettings(fname, func(s *settings) error {
+ for i, c := range s.Configs {
+ if c.Name == config {
+ s.Configs = append(s.Configs[:i], s.Configs[i+1:]...)
+ return nil
+ }
+ }
+ return fmt.Errorf("config %s not found", config)
+ })
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/driver/svg.go b/src/cmd/vendor/github.com/google/pprof/internal/driver/svg.go
new file mode 100644
index 0000000..62767e7
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/driver/svg.go
@@ -0,0 +1,80 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package driver
+
+import (
+ "regexp"
+ "strings"
+
+ "github.com/google/pprof/third_party/svgpan"
+)
+
// Patterns locating the pieces of dot's SVG output that massageSVG
// rewrites: the root <svg> element with its width/height/viewBox
// attributes, the top-level graph group, and the closing </svg> tag.
var (
	viewBox  = regexp.MustCompile(`<svg\s*width="[^"]+"\s*height="[^"]+"\s*viewBox="[^"]+"`)
	graphID  = regexp.MustCompile(`<g id="graph\d"`)
	svgClose = regexp.MustCompile(`</svg>`)
)
+
+// massageSVG enhances the SVG output from DOT to provide better
+// panning inside a web browser. It uses the svgpan library, which is
+// embedded into the svgpan.JSSource variable.
+func massageSVG(svg string) string {
+ // Work around for dot bug which misses quoting some ampersands,
+ // resulting on unparsable SVG.
+ svg = strings.Replace(svg, "&;", "&amp;;", -1)
+
+ // Dot's SVG output is
+ //
+ // <svg width="___" height="___"
+ // viewBox="___" xmlns=...>
+ // <g id="graph0" transform="...">
+ // ...
+ // </g>
+ // </svg>
+ //
+ // Change it to
+ //
+ // <svg width="100%" height="100%"
+ // xmlns=...>
+
+ // <script type="text/ecmascript"><![CDATA[` ..$(svgpan.JSSource)... `]]></script>`
+ // <g id="viewport" transform="translate(0,0)">
+ // <g id="graph0" transform="...">
+ // ...
+ // </g>
+ // </g>
+ // </svg>
+
+ if loc := viewBox.FindStringIndex(svg); loc != nil {
+ svg = svg[:loc[0]] +
+ `<svg width="100%" height="100%"` +
+ svg[loc[1]:]
+ }
+
+ if loc := graphID.FindStringIndex(svg); loc != nil {
+ svg = svg[:loc[0]] +
+ `<script type="text/ecmascript"><![CDATA[` + string(svgpan.JSSource) + `]]></script>` +
+ `<g id="viewport" transform="scale(0.5,0.5) translate(0,0)">` +
+ svg[loc[0]:]
+ }
+
+ if loc := svgClose.FindStringIndex(svg); loc != nil {
+ svg = svg[:loc[0]] +
+ `</g>` +
+ svg[loc[0]:]
+ }
+
+ return svg
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/driver/tempfile.go b/src/cmd/vendor/github.com/google/pprof/internal/driver/tempfile.go
new file mode 100644
index 0000000..b6c8776
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/driver/tempfile.go
@@ -0,0 +1,60 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package driver
+
+import (
+ "fmt"
+ "os"
+ "path/filepath"
+ "sync"
+)
+
+// newTempFile returns a new output file in dir with the provided prefix and suffix.
+func newTempFile(dir, prefix, suffix string) (*os.File, error) {
+ for index := 1; index < 10000; index++ {
+ switch f, err := os.OpenFile(filepath.Join(dir, fmt.Sprintf("%s%03d%s", prefix, index, suffix)), os.O_RDWR|os.O_CREATE|os.O_EXCL, 0666); {
+ case err == nil:
+ return f, nil
+ case !os.IsExist(err):
+ return nil, err
+ }
+ }
+ // Give up
+ return nil, fmt.Errorf("could not create file of the form %s%03d%s", prefix, 1, suffix)
+}
+
+var tempFiles []string
+var tempFilesMu = sync.Mutex{}
+
+// deferDeleteTempFile marks a file to be deleted by next call to Cleanup()
+func deferDeleteTempFile(path string) {
+ tempFilesMu.Lock()
+ tempFiles = append(tempFiles, path)
+ tempFilesMu.Unlock()
+}
+
+// cleanupTempFiles removes any temporary files selected for deferred cleaning.
+func cleanupTempFiles() error {
+ tempFilesMu.Lock()
+ defer tempFilesMu.Unlock()
+ var lastErr error
+ for _, f := range tempFiles {
+ if err := os.Remove(f); err != nil {
+ lastErr = err
+ }
+ }
+ tempFiles = nil
+ return lastErr
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/driver/webhtml.go b/src/cmd/vendor/github.com/google/pprof/internal/driver/webhtml.go
new file mode 100644
index 0000000..4f7610c
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/driver/webhtml.go
@@ -0,0 +1,1403 @@
+// Copyright 2017 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package driver
+
+import (
+ "html/template"
+
+ "github.com/google/pprof/third_party/d3"
+ "github.com/google/pprof/third_party/d3flamegraph"
+)
+
+// addTemplates adds a set of template definitions to templates.
+func addTemplates(templates *template.Template) {
+ template.Must(templates.Parse(`{{define "d3script"}}` + d3.JSSource + `{{end}}`))
+ template.Must(templates.Parse(`{{define "d3flamegraphscript"}}` + d3flamegraph.JSSource + `{{end}}`))
+ template.Must(templates.Parse(`{{define "d3flamegraphcss"}}` + d3flamegraph.CSSSource + `{{end}}`))
+ template.Must(templates.Parse(`
+{{define "css"}}
+<style type="text/css">
+* {
+ margin: 0;
+ padding: 0;
+ box-sizing: border-box;
+}
+html, body {
+ height: 100%;
+}
+body {
+ font-family: 'Roboto', -apple-system, BlinkMacSystemFont, 'Segoe UI', Helvetica, Arial, sans-serif, 'Apple Color Emoji', 'Segoe UI Emoji', 'Segoe UI Symbol';
+ font-size: 13px;
+ line-height: 1.4;
+ display: flex;
+ flex-direction: column;
+}
+a {
+ color: #2a66d9;
+}
+.header {
+ display: flex;
+ align-items: center;
+ height: 44px;
+ min-height: 44px;
+ background-color: #eee;
+ color: #212121;
+ padding: 0 1rem;
+}
+.header > div {
+ margin: 0 0.125em;
+}
+.header .title h1 {
+ font-size: 1.75em;
+ margin-right: 1rem;
+}
+.header .title a {
+ color: #212121;
+ text-decoration: none;
+}
+.header .title a:hover {
+ text-decoration: underline;
+}
+.header .description {
+ width: 100%;
+ text-align: right;
+ white-space: nowrap;
+}
+@media screen and (max-width: 799px) {
+ .header input {
+ display: none;
+ }
+}
+#detailsbox {
+ display: none;
+ z-index: 1;
+ position: fixed;
+ top: 40px;
+ right: 20px;
+ background-color: #ffffff;
+ box-shadow: 0 1px 5px rgba(0,0,0,.3);
+ line-height: 24px;
+ padding: 1em;
+ text-align: left;
+}
+.header input {
+ background: white url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24' style='pointer-events:none;display:block;width:100%25;height:100%25;fill:%23757575'%3E%3Cpath d='M15.5 14h-.79l-.28-.27C15.41 12.59 16 11.11 16 9.5 16 5.91 13.09 3 9.5 3S3 5.91 3 9.5 5.91 16 9.5 16c1.61.0 3.09-.59 4.23-1.57l.27.28v.79l5 4.99L20.49 19l-4.99-5zm-6 0C7.01 14 5 11.99 5 9.5S7.01 5 9.5 5 14 7.01 14 9.5 11.99 14 9.5 14z'/%3E%3C/svg%3E") no-repeat 4px center/20px 20px;
+ border: 1px solid #d1d2d3;
+ border-radius: 2px 0 0 2px;
+ padding: 0.25em;
+ padding-left: 28px;
+ margin-left: 1em;
+ font-family: 'Roboto', 'Noto', sans-serif;
+ font-size: 1em;
+ line-height: 24px;
+ color: #212121;
+}
+.downArrow {
+ border-top: .36em solid #ccc;
+ border-left: .36em solid transparent;
+ border-right: .36em solid transparent;
+ margin-bottom: .05em;
+ margin-left: .5em;
+ transition: border-top-color 200ms;
+}
+.menu-item {
+ height: 100%;
+ text-transform: uppercase;
+ font-family: 'Roboto Medium', -apple-system, BlinkMacSystemFont, 'Segoe UI', Helvetica, Arial, sans-serif, 'Apple Color Emoji', 'Segoe UI Emoji', 'Segoe UI Symbol';
+ position: relative;
+}
+.menu-item .menu-name:hover {
+ opacity: 0.75;
+}
+.menu-item .menu-name:hover .downArrow {
+ border-top-color: #666;
+}
+.menu-name {
+ height: 100%;
+ padding: 0 0.5em;
+ display: flex;
+ align-items: center;
+ justify-content: center;
+}
+.submenu {
+ display: none;
+ z-index: 1;
+ margin-top: -4px;
+ min-width: 10em;
+ position: absolute;
+ left: 0px;
+ background-color: white;
+ box-shadow: 0 1px 5px rgba(0,0,0,.3);
+ font-size: 100%;
+ text-transform: none;
+}
+.menu-item, .submenu {
+ user-select: none;
+ -moz-user-select: none;
+ -ms-user-select: none;
+ -webkit-user-select: none;
+}
+.submenu hr {
+ border: 0;
+ border-top: 2px solid #eee;
+}
+.submenu a {
+ display: block;
+ padding: .5em 1em;
+ text-decoration: none;
+}
+.submenu a:hover, .submenu a.active {
+ color: white;
+ background-color: #6b82d6;
+}
+.submenu a.disabled {
+ color: gray;
+ pointer-events: none;
+}
+.menu-check-mark {
+ position: absolute;
+ left: 2px;
+}
+.menu-delete-btn {
+ position: absolute;
+ right: 2px;
+}
+
+{{/* Used to disable events when a modal dialog is displayed */}}
+#dialog-overlay {
+ display: none;
+ position: fixed;
+ left: 0px;
+ top: 0px;
+ width: 100%;
+ height: 100%;
+ background-color: rgba(1,1,1,0.1);
+}
+
+.dialog {
+ {{/* Displayed centered horizontally near the top */}}
+ display: none;
+ position: fixed;
+ margin: 0px;
+ top: 60px;
+ left: 50%;
+ transform: translateX(-50%);
+
+ z-index: 3;
+ font-size: 125%;
+ background-color: #ffffff;
+ box-shadow: 0 1px 5px rgba(0,0,0,.3);
+}
+.dialog-header {
+ font-size: 120%;
+ border-bottom: 1px solid #CCCCCC;
+ width: 100%;
+ text-align: center;
+ background: #EEEEEE;
+ user-select: none;
+}
+.dialog-footer {
+ border-top: 1px solid #CCCCCC;
+ width: 100%;
+ text-align: right;
+ padding: 10px;
+}
+.dialog-error {
+ margin: 10px;
+ color: red;
+}
+.dialog input {
+ margin: 10px;
+ font-size: inherit;
+}
+.dialog button {
+ margin-left: 10px;
+ font-size: inherit;
+}
+#save-dialog, #delete-dialog {
+ width: 50%;
+ max-width: 20em;
+}
+#delete-prompt {
+ padding: 10px;
+}
+
+#content {
+ overflow-y: scroll;
+ padding: 1em;
+}
+#top {
+ overflow-y: scroll;
+}
+#graph {
+ overflow: hidden;
+}
+#graph svg {
+ width: 100%;
+ height: auto;
+ padding: 10px;
+}
+#content.source .filename {
+ margin-top: 0;
+ margin-bottom: 1em;
+ font-size: 120%;
+}
+#content.source pre {
+ margin-bottom: 3em;
+}
+table {
+ border-spacing: 0px;
+ width: 100%;
+ padding-bottom: 1em;
+ white-space: nowrap;
+}
+table thead {
+ font-family: 'Roboto Medium', -apple-system, BlinkMacSystemFont, 'Segoe UI', Helvetica, Arial, sans-serif, 'Apple Color Emoji', 'Segoe UI Emoji', 'Segoe UI Symbol';
+}
+table tr th {
+ position: sticky;
+ top: 0;
+ background-color: #ddd;
+ text-align: right;
+ padding: .3em .5em;
+}
+table tr td {
+ padding: .3em .5em;
+ text-align: right;
+}
+#top table tr th:nth-child(6),
+#top table tr th:nth-child(7),
+#top table tr td:nth-child(6),
+#top table tr td:nth-child(7) {
+ text-align: left;
+}
+#top table tr td:nth-child(6) {
+ width: 100%;
+ text-overflow: ellipsis;
+ overflow: hidden;
+ white-space: nowrap;
+}
+#flathdr1, #flathdr2, #cumhdr1, #cumhdr2, #namehdr {
+ cursor: ns-resize;
+}
+.hilite {
+ background-color: #ebf5fb;
+ font-weight: bold;
+}
+</style>
+{{end}}
+
+{{define "header"}}
+<div class="header">
+ <div class="title">
+ <h1><a href="./">pprof</a></h1>
+ </div>
+
+ <div id="view" class="menu-item">
+ <div class="menu-name">
+ View
+ <i class="downArrow"></i>
+ </div>
+ <div class="submenu">
+ <a title="{{.Help.top}}" href="./top" id="topbtn">Top</a>
+ <a title="{{.Help.graph}}" href="./" id="graphbtn">Graph</a>
+ <a title="{{.Help.flamegraph}}" href="./flamegraph" id="flamegraph">Flame Graph</a>
+ <a title="{{.Help.peek}}" href="./peek" id="peek">Peek</a>
+ <a title="{{.Help.list}}" href="./source" id="list">Source</a>
+ <a title="{{.Help.disasm}}" href="./disasm" id="disasm">Disassemble</a>
+ </div>
+ </div>
+
+ {{$sampleLen := len .SampleTypes}}
+ {{if gt $sampleLen 1}}
+ <div id="sample" class="menu-item">
+ <div class="menu-name">
+ Sample
+ <i class="downArrow"></i>
+ </div>
+ <div class="submenu">
+ {{range .SampleTypes}}
+ <a href="?si={{.}}" id="{{.}}">{{.}}</a>
+ {{end}}
+ </div>
+ </div>
+ {{end}}
+
+ <div id="refine" class="menu-item">
+ <div class="menu-name">
+ Refine
+ <i class="downArrow"></i>
+ </div>
+ <div class="submenu">
+ <a title="{{.Help.focus}}" href="?" id="focus">Focus</a>
+ <a title="{{.Help.ignore}}" href="?" id="ignore">Ignore</a>
+ <a title="{{.Help.hide}}" href="?" id="hide">Hide</a>
+ <a title="{{.Help.show}}" href="?" id="show">Show</a>
+ <a title="{{.Help.show_from}}" href="?" id="show-from">Show from</a>
+ <hr>
+ <a title="{{.Help.reset}}" href="?">Reset</a>
+ </div>
+ </div>
+
+ <div id="config" class="menu-item">
+ <div class="menu-name">
+ Config
+ <i class="downArrow"></i>
+ </div>
+ <div class="submenu">
+ <a title="{{.Help.save_config}}" id="save-config">Save as ...</a>
+ <hr>
+ {{range .Configs}}
+ <a href="{{.URL}}">
+ {{if .Current}}<span class="menu-check-mark">✓</span>{{end}}
+ {{.Name}}
+ {{if .UserConfig}}<span class="menu-delete-btn" data-config={{.Name}}>🗙</span>{{end}}
+ </a>
+ {{end}}
+ </div>
+ </div>
+
+ <div>
+ <input id="search" type="text" placeholder="Search regexp" autocomplete="off" autocapitalize="none" size=40>
+ </div>
+
+ <div class="description">
+ <a title="{{.Help.details}}" href="#" id="details">{{.Title}}</a>
+ <div id="detailsbox">
+ {{range .Legend}}<div>{{.}}</div>{{end}}
+ </div>
+ </div>
+</div>
+
+<div id="dialog-overlay"></div>
+
+<div class="dialog" id="save-dialog">
+ <div class="dialog-header">Save options as</div>
+ <datalist id="config-list">
+ {{range .Configs}}{{if .UserConfig}}<option value="{{.Name}}" />{{end}}{{end}}
+ </datalist>
+ <input id="save-name" type="text" list="config-list" placeholder="New config" />
+ <div class="dialog-footer">
+ <span class="dialog-error" id="save-error"></span>
+ <button id="save-cancel">Cancel</button>
+ <button id="save-confirm">Save</button>
+ </div>
+</div>
+
+<div class="dialog" id="delete-dialog">
+ <div class="dialog-header" id="delete-dialog-title">Delete config</div>
+ <div id="delete-prompt"></div>
+ <div class="dialog-footer">
+ <span class="dialog-error" id="delete-error"></span>
+ <button id="delete-cancel">Cancel</button>
+ <button id="delete-confirm">Delete</button>
+ </div>
+</div>
+
+<div id="errors">{{range .Errors}}<div>{{.}}</div>{{end}}</div>
+{{end}}
+
+{{define "graph" -}}
+<!DOCTYPE html>
+<html>
+<head>
+ <meta charset="utf-8">
+ <title>{{.Title}}</title>
+ {{template "css" .}}
+</head>
+<body>
+ {{template "header" .}}
+ <div id="graph">
+ {{.HTMLBody}}
+ </div>
+ {{template "script" .}}
+ <script>viewer(new URL(window.location.href), {{.Nodes}});</script>
+</body>
+</html>
+{{end}}
+
+{{define "script"}}
+<script>
+// Make svg pannable and zoomable.
+// Call clickHandler(t) if a click event is caught by the pan event handlers.
+function initPanAndZoom(svg, clickHandler) {
+ 'use strict';
+
+ // Current mouse/touch handling mode
+ const IDLE = 0;
+ const MOUSEPAN = 1;
+ const TOUCHPAN = 2;
+ const TOUCHZOOM = 3;
+ let mode = IDLE;
+
+ // State needed to implement zooming.
+ let currentScale = 1.0;
+ const initWidth = svg.viewBox.baseVal.width;
+ const initHeight = svg.viewBox.baseVal.height;
+
+ // State needed to implement panning.
+ let panLastX = 0; // Last event X coordinate
+ let panLastY = 0; // Last event Y coordinate
+ let moved = false; // Have we seen significant movement
+ let touchid = null; // Current touch identifier
+
+ // State needed for pinch zooming
+ let touchid2 = null; // Second id for pinch zooming
+ let initGap = 1.0; // Starting gap between two touches
+ let initScale = 1.0; // currentScale when pinch zoom started
+ let centerPoint = null; // Center point for scaling
+
+ // Convert event coordinates to svg coordinates.
+ function toSvg(x, y) {
+ const p = svg.createSVGPoint();
+ p.x = x;
+ p.y = y;
+ let m = svg.getCTM();
+ if (m == null) m = svg.getScreenCTM(); // Firefox workaround.
+ return p.matrixTransform(m.inverse());
+ }
+
+ // Change the scaling for the svg to s, keeping the point denoted
+ // by u (in svg coordinates]) fixed at the same screen location.
+ function rescale(s, u) {
+ // Limit to a good range.
+ if (s < 0.2) s = 0.2;
+ if (s > 10.0) s = 10.0;
+
+ currentScale = s;
+
+ // svg.viewBox defines the visible portion of the user coordinate
+ // system. So to magnify by s, divide the visible portion by s,
+ // which will then be stretched to fit the viewport.
+ const vb = svg.viewBox;
+ const w1 = vb.baseVal.width;
+ const w2 = initWidth / s;
+ const h1 = vb.baseVal.height;
+ const h2 = initHeight / s;
+ vb.baseVal.width = w2;
+ vb.baseVal.height = h2;
+
+ // We also want to adjust vb.baseVal.x so that u.x remains at same
+ // screen X coordinate. In other words, want to change it from x1 to x2
+ // so that:
+ // (u.x - x1) / w1 = (u.x - x2) / w2
+ // Simplifying that, we get
+ // (u.x - x1) * (w2 / w1) = u.x - x2
+ // x2 = u.x - (u.x - x1) * (w2 / w1)
+ vb.baseVal.x = u.x - (u.x - vb.baseVal.x) * (w2 / w1);
+ vb.baseVal.y = u.y - (u.y - vb.baseVal.y) * (h2 / h1);
+ }
+
+ function handleWheel(e) {
+ if (e.deltaY == 0) return;
+ // Change scale factor by 1.1 or 1/1.1
+ rescale(currentScale * (e.deltaY < 0 ? 1.1 : (1/1.1)),
+ toSvg(e.offsetX, e.offsetY));
+ }
+
+ function setMode(m) {
+ mode = m;
+ touchid = null;
+ touchid2 = null;
+ }
+
+ function panStart(x, y) {
+ moved = false;
+ panLastX = x;
+ panLastY = y;
+ }
+
+ function panMove(x, y) {
+ let dx = x - panLastX;
+ let dy = y - panLastY;
+ if (Math.abs(dx) <= 2 && Math.abs(dy) <= 2) return; // Ignore tiny moves
+
+ moved = true;
+ panLastX = x;
+ panLastY = y;
+
+ // Firefox workaround: get dimensions from parentNode.
+ const swidth = svg.clientWidth || svg.parentNode.clientWidth;
+ const sheight = svg.clientHeight || svg.parentNode.clientHeight;
+
+ // Convert deltas from screen space to svg space.
+ dx *= (svg.viewBox.baseVal.width / swidth);
+ dy *= (svg.viewBox.baseVal.height / sheight);
+
+ svg.viewBox.baseVal.x -= dx;
+ svg.viewBox.baseVal.y -= dy;
+ }
+
+ function handleScanStart(e) {
+ if (e.button != 0) return; // Do not catch right-clicks etc.
+ setMode(MOUSEPAN);
+ panStart(e.clientX, e.clientY);
+ e.preventDefault();
+ svg.addEventListener('mousemove', handleScanMove);
+ }
+
+ function handleScanMove(e) {
+ if (e.buttons == 0) {
+ // Missed an end event, perhaps because mouse moved outside window.
+ setMode(IDLE);
+ svg.removeEventListener('mousemove', handleScanMove);
+ return;
+ }
+ if (mode == MOUSEPAN) panMove(e.clientX, e.clientY);
+ }
+
+ function handleScanEnd(e) {
+ if (mode == MOUSEPAN) panMove(e.clientX, e.clientY);
+ setMode(IDLE);
+ svg.removeEventListener('mousemove', handleScanMove);
+ if (!moved) clickHandler(e.target);
+ }
+
+ // Find touch object with specified identifier.
+ function findTouch(tlist, id) {
+ for (const t of tlist) {
+ if (t.identifier == id) return t;
+ }
+ return null;
+ }
+
+ // Return distance between two touch points
+ function touchGap(t1, t2) {
+ const dx = t1.clientX - t2.clientX;
+ const dy = t1.clientY - t2.clientY;
+ return Math.hypot(dx, dy);
+ }
+
+ function handleTouchStart(e) {
+ if (mode == IDLE && e.changedTouches.length == 1) {
+ // Start touch based panning
+ const t = e.changedTouches[0];
+ setMode(TOUCHPAN);
+ touchid = t.identifier;
+ panStart(t.clientX, t.clientY);
+ e.preventDefault();
+ } else if (mode == TOUCHPAN && e.touches.length == 2) {
+ // Start pinch zooming
+ setMode(TOUCHZOOM);
+ const t1 = e.touches[0];
+ const t2 = e.touches[1];
+ touchid = t1.identifier;
+ touchid2 = t2.identifier;
+ initScale = currentScale;
+ initGap = touchGap(t1, t2);
+ centerPoint = toSvg((t1.clientX + t2.clientX) / 2,
+ (t1.clientY + t2.clientY) / 2);
+ e.preventDefault();
+ }
+ }
+
+ function handleTouchMove(e) {
+ if (mode == TOUCHPAN) {
+ const t = findTouch(e.changedTouches, touchid);
+ if (t == null) return;
+ if (e.touches.length != 1) {
+ setMode(IDLE);
+ return;
+ }
+ panMove(t.clientX, t.clientY);
+ e.preventDefault();
+ } else if (mode == TOUCHZOOM) {
+ // Get two touches; new gap; rescale to ratio.
+ const t1 = findTouch(e.touches, touchid);
+ const t2 = findTouch(e.touches, touchid2);
+ if (t1 == null || t2 == null) return;
+ const gap = touchGap(t1, t2);
+ rescale(initScale * gap / initGap, centerPoint);
+ e.preventDefault();
+ }
+ }
+
+ function handleTouchEnd(e) {
+ if (mode == TOUCHPAN) {
+ const t = findTouch(e.changedTouches, touchid);
+ if (t == null) return;
+ panMove(t.clientX, t.clientY);
+ setMode(IDLE);
+ e.preventDefault();
+ if (!moved) clickHandler(t.target);
+ } else if (mode == TOUCHZOOM) {
+ setMode(IDLE);
+ e.preventDefault();
+ }
+ }
+
+ svg.addEventListener('mousedown', handleScanStart);
+ svg.addEventListener('mouseup', handleScanEnd);
+ svg.addEventListener('touchstart', handleTouchStart);
+ svg.addEventListener('touchmove', handleTouchMove);
+ svg.addEventListener('touchend', handleTouchEnd);
+ svg.addEventListener('wheel', handleWheel, true);
+}
+
+function initMenus() {
+ 'use strict';
+
+ let activeMenu = null;
+ let activeMenuHdr = null;
+
+ function cancelActiveMenu() {
+ if (activeMenu == null) return;
+ activeMenu.style.display = 'none';
+ activeMenu = null;
+ activeMenuHdr = null;
+ }
+
+ // Set click handlers on every menu header.
+ for (const menu of document.getElementsByClassName('submenu')) {
+ const hdr = menu.parentElement;
+ if (hdr == null) return;
+ if (hdr.classList.contains('disabled')) return;
+ function showMenu(e) {
+ // menu is a child of hdr, so this event can fire for clicks
+ // inside menu. Ignore such clicks.
+ if (e.target.parentElement != hdr) return;
+ activeMenu = menu;
+ activeMenuHdr = hdr;
+ menu.style.display = 'block';
+ }
+ hdr.addEventListener('mousedown', showMenu);
+ hdr.addEventListener('touchstart', showMenu);
+ }
+
+ // If there is an active menu and a down event outside, retract the menu.
+ for (const t of ['mousedown', 'touchstart']) {
+ document.addEventListener(t, (e) => {
+ // Note: to avoid unnecessary flicker, if the down event is inside
+ // the active menu header, do not retract the menu.
+ if (activeMenuHdr != e.target.closest('.menu-item')) {
+ cancelActiveMenu();
+ }
+ }, { passive: true, capture: true });
+ }
+
+ // If there is an active menu and an up event inside, retract the menu.
+ document.addEventListener('mouseup', (e) => {
+ if (activeMenu == e.target.closest('.submenu')) {
+ cancelActiveMenu();
+ }
+ }, { passive: true, capture: true });
+}
+
+function sendURL(method, url, done) {
+ fetch(url.toString(), {method: method})
+ .then((response) => { done(response.ok); })
+ .catch((error) => { done(false); });
+}
+
+// Initialize handlers for saving/loading configurations. The save and
+// delete dialogs talk to the ./saveconfig and ./deleteconfig endpoints
+// (see webui.go) via sendURL.
+function initConfigManager() {
+  'use strict';
+
+  // Initialize various elements.
+  function elem(id) {
+    const result = document.getElementById(id);
+    if (!result) console.warn('element ' + id + ' not found');
+    return result;
+  }
+  const overlay = elem('dialog-overlay');
+  const saveDialog = elem('save-dialog');
+  const saveInput = elem('save-name');
+  const saveError = elem('save-error');
+  const delDialog = elem('delete-dialog');
+  const delPrompt = elem('delete-prompt');
+  const delError = elem('delete-error');
+
+  // At most one dialog (plus the shared overlay) is visible at a time.
+  let currentDialog = null;
+  // Menu entry element whose config is pending deletion, if any.
+  let currentDeleteTarget = null;
+
+  // Show the given dialog (hiding any other open one); pass null to
+  // hide everything.
+  function showDialog(dialog) {
+    if (currentDialog != null) {
+      overlay.style.display = 'none';
+      currentDialog.style.display = 'none';
+    }
+    currentDialog = dialog;
+    if (dialog != null) {
+      overlay.style.display = 'block';
+      dialog.style.display = 'block';
+    }
+  }
+
+  function cancelDialog(e) {
+    showDialog(null);
+  }
+
+  // Show dialog for saving the current config.
+  function showSaveDialog(e) {
+    saveError.innerText = '';
+    showDialog(saveDialog);
+    saveInput.focus();
+  }
+
+  // Commit save config.
+  function commitSave(e) {
+    const name = saveInput.value;
+    const url = new URL(document.URL);
+    // Set path relative to existing path.
+    url.pathname = new URL('./saveconfig', document.URL).pathname;
+    url.searchParams.set('config', name);
+    saveError.innerText = '';
+    sendURL('POST', url, (ok) => {
+      if (!ok) {
+        saveError.innerText = 'Save failed';
+      } else {
+        showDialog(null);
+        location.reload(); // Reload to show updated config menu
+      }
+    });
+  }
+
+  function handleSaveInputKey(e) {
+    if (e.key === 'Enter') commitSave(e);
+  }
+
+  // Ask for confirmation before deleting the config named in
+  // elem.dataset.config; the actual deletion happens in commitDelete.
+  function deleteConfig(e, elem) {
+    e.preventDefault();
+    const config = elem.dataset.config;
+    delPrompt.innerText = 'Delete ' + config + '?';
+    currentDeleteTarget = elem;
+    showDialog(delDialog);
+  }
+
+  // Delete the config recorded by deleteConfig() above.
+  function commitDelete(e, elem) {
+    if (!currentDeleteTarget) return;
+    const config = currentDeleteTarget.dataset.config;
+    const url = new URL('./deleteconfig', document.URL);
+    url.searchParams.set('config', config);
+    delError.innerText = '';
+    sendURL('DELETE', url, (ok) => {
+      if (!ok) {
+        delError.innerText = 'Delete failed';
+        return;
+      }
+      showDialog(null);
+      // Remove menu entry for this config.
+      if (currentDeleteTarget && currentDeleteTarget.parentElement) {
+        currentDeleteTarget.parentElement.remove();
+      }
+    });
+  }
+
+  // Bind event on elem to fn.
+  function bind(event, elem, fn) {
+    if (elem == null) return;
+    elem.addEventListener(event, fn);
+    if (event == 'click') {
+      // Also enable via touch.
+      elem.addEventListener('touchstart', fn);
+    }
+  }
+
+  bind('click', elem('save-config'), showSaveDialog);
+  bind('click', elem('save-cancel'), cancelDialog);
+  bind('click', elem('save-confirm'), commitSave);
+  bind('keydown', saveInput, handleSaveInputKey);
+
+  bind('click', elem('delete-cancel'), cancelDialog);
+  bind('click', elem('delete-confirm'), commitDelete);
+
+  // Activate deletion button for all config entries in menu.
+  for (const del of Array.from(document.getElementsByClassName('menu-delete-btn'))) {
+    bind('click', del, (e) => {
+      deleteConfig(e, del);
+    });
+  }
+}
+
+// viewer wires up the interactive parts of a profile page: node
+// selection (clicks in the svg graph or the top table), the regexp
+// search box, and the menu/button links whose hrefs are rewritten to
+// carry the current selection. nodes is the list of profile node
+// names indexed by node id, or null for views without nodes.
+function viewer(baseUrl, nodes) {
+  'use strict';
+
+  // Elements
+  const search = document.getElementById('search');
+  const graph0 = document.getElementById('graph0');
+  const svg = (graph0 == null ? null : graph0.parentElement);
+  const toptable = document.getElementById('toptable');
+
+  // The selection is either regexp-based (regexpActive, driven by the
+  // search box) or list-based (the selected map of node id -> true).
+  let regexpActive = false;
+  let selected = new Map();
+  let origFill = new Map();  // saved polygon fill colors, keyed by polygon
+  let searchAlarm = null;    // pending timer for delayed search handling
+  let buttonsEnabled = true;
+
+  // Toggle visibility of the details box.
+  function handleDetails(e) {
+    e.preventDefault();
+    const detailsText = document.getElementById('detailsbox');
+    if (detailsText != null) {
+      if (detailsText.style.display === 'block') {
+        detailsText.style.display = 'none';
+      } else {
+        detailsText.style.display = 'block';
+      }
+    }
+  }
+
+  // On Enter, apply the search box contents as the focus ('f') filter.
+  function handleKey(e) {
+    if (e.keyCode != 13) return;
+    setHrefParams(window.location, function (params) {
+      params.set('f', search.value);
+    });
+    e.preventDefault();
+  }
+
+  function handleSearch() {
+    // Delay expensive processing so a flurry of key strokes is handled once.
+    if (searchAlarm != null) {
+      clearTimeout(searchAlarm);
+    }
+    searchAlarm = setTimeout(selectMatching, 300);
+
+    regexpActive = true;
+    updateButtons();
+  }
+
+  // Recompute the selected set from the regexp in the search box.
+  function selectMatching() {
+    searchAlarm = null;
+    let re = null;
+    if (search.value != '') {
+      try {
+        re = new RegExp(search.value);
+      } catch (e) {
+        // TODO: Display error state in search box
+        return;
+      }
+    }
+
+    function match(text) {
+      return re != null && re.test(text);
+    }
+
+    // drop currently selected items that do not match re.
+    selected.forEach(function(v, n) {
+      if (!match(nodes[n])) {
+        unselect(n, document.getElementById('node' + n));
+      }
+    })
+
+    // add matching items that are not currently selected.
+    if (nodes) {
+      for (let n = 0; n < nodes.length; n++) {
+        if (!selected.has(n) && match(nodes[n])) {
+          select(n, document.getElementById('node' + n));
+        }
+      }
+    }
+
+    updateButtons();
+  }
+
+  // Toggle selection of the graph node containing the clicked element.
+  function toggleSvgSelect(elem) {
+    // Walk up to immediate child of graph0
+    while (elem != null && elem.parentElement != graph0) {
+      elem = elem.parentElement;
+    }
+    if (!elem) return;
+
+    // Disable regexp mode.
+    regexpActive = false;
+
+    const n = nodeId(elem);
+    if (n < 0) return;
+    if (selected.has(n)) {
+      unselect(n, elem);
+    } else {
+      select(n, elem);
+    }
+    updateButtons();
+  }
+
+  function unselect(n, elem) {
+    if (elem == null) return;
+    selected.delete(n);
+    setBackground(elem, false);
+  }
+
+  function select(n, elem) {
+    if (elem == null) return;
+    selected.set(n, true);
+    setBackground(elem, true);
+  }
+
+  // Extract the numeric node id from an element id of the form
+  // 'node<N>'; returns -1 if the element is not a valid node.
+  function nodeId(elem) {
+    const id = elem.id;
+    if (!id) return -1;
+    if (!id.startsWith('node')) return -1;
+    const n = parseInt(id.slice(4), 10);
+    if (isNaN(n)) return -1;
+    if (n < 0 || n >= nodes.length) return -1;
+    return n;
+  }
+
+  // Highlight (set) or unhighlight an element: table rows get the
+  // 'hilite' class, graph nodes get their polygon fill changed (the
+  // original fill is remembered in origFill for restoration).
+  function setBackground(elem, set) {
+    // Handle table row highlighting.
+    if (elem.nodeName == 'TR') {
+      elem.classList.toggle('hilite', set);
+      return;
+    }
+
+    // Handle svg element highlighting.
+    const p = findPolygon(elem);
+    if (p != null) {
+      if (set) {
+        origFill.set(p, p.style.fill);
+        p.style.fill = '#ccccff';
+      } else if (origFill.has(p)) {
+        p.style.fill = origFill.get(p);
+      }
+    }
+  }
+
+  // Depth-first search for the first <polygon> under elem.
+  function findPolygon(elem) {
+    if (elem.localName == 'polygon') return elem;
+    for (const c of elem.children) {
+      const p = findPolygon(c);
+      if (p != null) return p;
+    }
+    return null;
+  }
+
+  // convert a string to a regexp that matches that string.
+  function quotemeta(str) {
+    return str.replace(/([\\\.?+*\[\](){}|^$])/g, '\\$1');
+  }
+
+  // Make the link with the given id select that sample index ('si').
+  function setSampleIndexLink(id) {
+    const elem = document.getElementById(id);
+    if (elem != null) {
+      setHrefParams(elem, function (params) {
+        params.set("si", id);
+      });
+    }
+  }
+
+  // Update id's href to reflect current selection whenever it is
+  // liable to be followed.
+  function makeSearchLinkDynamic(id) {
+    const elem = document.getElementById(id);
+    if (elem == null) return;
+
+    // Most links copy current selection into the 'f' parameter,
+    // but Refine menu links are different.
+    let param = 'f';
+    if (id == 'ignore') param = 'i';
+    if (id == 'hide') param = 'h';
+    if (id == 'show') param = 's';
+    if (id == 'show-from') param = 'sf';
+
+    // We update on mouseenter so middle-click/right-click work properly.
+    elem.addEventListener('mouseenter', updater);
+    elem.addEventListener('touchstart', updater);
+
+    function updater() {
+      // The selection can be in one of two modes: regexp-based or
+      // list-based. Construct regular expression depending on mode.
+      let re = regexpActive
+        ? search.value
+        : Array.from(selected.keys()).map(key => quotemeta(nodes[key])).join('|');
+
+      setHrefParams(elem, function (params) {
+        if (re != '') {
+          // For focus/show/show-from, forget old parameter. For others, add to re.
+          if (param != 'f' && param != 's' && param != 'sf' && params.has(param)) {
+            const old = params.get(param);
+            if (old != '') {
+              re += '|' + old;
+            }
+          }
+          params.set(param, re);
+        } else {
+          params.delete(param);
+        }
+      });
+    }
+  }
+
+  // Rewrite elem.href: start from this page's query parameters, let
+  // paramSetter adjust them, and drop any fragment.
+  function setHrefParams(elem, paramSetter) {
+    let url = new URL(elem.href);
+    url.hash = '';
+
+    // Copy params from this page's URL.
+    const params = url.searchParams;
+    for (const p of new URLSearchParams(window.location.search)) {
+      params.set(p[0], p[1]);
+    }
+
+    // Give the params to the setter to modify.
+    paramSetter(params);
+
+    elem.href = url.toString();
+  }
+
+  function handleTopClick(e) {
+    // Walk back until we find TR and then get the Name column (index 5)
+    let elem = e.target;
+    while (elem != null && elem.nodeName != 'TR') {
+      elem = elem.parentElement;
+    }
+    if (elem == null || elem.children.length < 6) return;
+
+    e.preventDefault();
+    const tr = elem;
+    const td = elem.children[5];
+    if (td.nodeName != 'TD') return;
+    const name = td.innerText;
+    const index = nodes.indexOf(name);
+    if (index < 0) return;
+
+    // Disable regexp mode.
+    regexpActive = false;
+
+    if (selected.has(index)) {
+      unselect(index, elem);
+    } else {
+      select(index, elem);
+    }
+    updateButtons();
+  }
+
+  // Enable the Refine links only when something is selected or a
+  // search expression is present.
+  function updateButtons() {
+    const enable = (search.value != '' || selected.size != 0);
+    if (buttonsEnabled == enable) return;
+    buttonsEnabled = enable;
+    for (const id of ['focus', 'ignore', 'hide', 'show', 'show-from']) {
+      const link = document.getElementById(id);
+      if (link != null) {
+        link.classList.toggle('disabled', !enable);
+      }
+    }
+  }
+
+  // Initialize button states
+  updateButtons();
+
+  // Setup event handlers
+  initMenus();
+  if (svg != null) {
+    initPanAndZoom(svg, toggleSvgSelect);
+  }
+  if (toptable != null) {
+    toptable.addEventListener('mousedown', handleTopClick);
+    toptable.addEventListener('touchstart', handleTopClick);
+  }
+
+  const ids = ['topbtn', 'graphbtn', 'flamegraph', 'peek', 'list', 'disasm',
+               'focus', 'ignore', 'hide', 'show', 'show-from'];
+  ids.forEach(makeSearchLinkDynamic);
+
+  const sampleIDs = [{{range .SampleTypes}}'{{.}}', {{end}}];
+  sampleIDs.forEach(setSampleIndexLink);
+
+  // Bind action to button with specified id.
+  function addAction(id, action) {
+    const btn = document.getElementById(id);
+    if (btn != null) {
+      btn.addEventListener('click', action);
+      btn.addEventListener('touchstart', action);
+    }
+  }
+
+  addAction('details', handleDetails);
+  initConfigManager();
+
+  search.addEventListener('input', handleSearch);
+  search.addEventListener('keydown', handleKey);
+
+  // Give initial focus to main container so it can be scrolled using keys.
+  const main = document.getElementById('bodycontainer');
+  if (main) {
+    main.focus();
+  }
+}
+</script>
+{{end}}
+
+{{define "top" -}}
+<!DOCTYPE html>
+<html>
+<head>
+ <meta charset="utf-8">
+ <title>{{.Title}}</title>
+ {{template "css" .}}
+ <style type="text/css">
+ </style>
+</head>
+<body>
+ {{template "header" .}}
+ <div id="top">
+ <table id="toptable">
+ <thead>
+ <tr>
+ <th id="flathdr1">Flat</th>
+ <th id="flathdr2">Flat%</th>
+ <th>Sum%</th>
+ <th id="cumhdr1">Cum</th>
+ <th id="cumhdr2">Cum%</th>
+ <th id="namehdr">Name</th>
+ <th>Inlined?</th>
+ </tr>
+ </thead>
+ <tbody id="rows"></tbody>
+ </table>
+ </div>
+ {{template "script" .}}
+ <script>
+ function makeTopTable(total, entries) {
+ const rows = document.getElementById('rows');
+ if (rows == null) return;
+
+ // Store initial index in each entry so we have stable node ids for selection.
+ for (let i = 0; i < entries.length; i++) {
+ entries[i].Id = 'node' + i;
+ }
+
+ // Which column are we currently sorted by and in what order?
+ let currentColumn = '';
+ let descending = false;
+ sortBy('Flat');
+
+ function sortBy(column) {
+ // Update sort criteria
+ if (column == currentColumn) {
+ descending = !descending; // Reverse order
+ } else {
+ currentColumn = column;
+ descending = (column != 'Name');
+ }
+
+ // Sort according to current criteria.
+ function cmp(a, b) {
+ const av = a[currentColumn];
+ const bv = b[currentColumn];
+ if (av < bv) return -1;
+ if (av > bv) return +1;
+ return 0;
+ }
+ entries.sort(cmp);
+ if (descending) entries.reverse();
+
+ function addCell(tr, val) {
+ const td = document.createElement('td');
+ td.textContent = val;
+ tr.appendChild(td);
+ }
+
+ function percent(v) {
+ return (v * 100.0 / total).toFixed(2) + '%';
+ }
+
+ // Generate rows
+ const fragment = document.createDocumentFragment();
+ let sum = 0;
+ for (const row of entries) {
+ const tr = document.createElement('tr');
+ tr.id = row.Id;
+ sum += row.Flat;
+ addCell(tr, row.FlatFormat);
+ addCell(tr, percent(row.Flat));
+ addCell(tr, percent(sum));
+ addCell(tr, row.CumFormat);
+ addCell(tr, percent(row.Cum));
+ addCell(tr, row.Name);
+ addCell(tr, row.InlineLabel);
+ fragment.appendChild(tr);
+ }
+
+ rows.textContent = ''; // Remove old rows
+ rows.appendChild(fragment);
+ }
+
+ // Make different column headers trigger sorting.
+ function bindSort(id, column) {
+ const hdr = document.getElementById(id);
+ if (hdr == null) return;
+ const fn = function() { sortBy(column) };
+ hdr.addEventListener('click', fn);
+ hdr.addEventListener('touch', fn);
+ }
+ bindSort('flathdr1', 'Flat');
+ bindSort('flathdr2', 'Flat');
+ bindSort('cumhdr1', 'Cum');
+ bindSort('cumhdr2', 'Cum');
+ bindSort('namehdr', 'Name');
+ }
+
+ viewer(new URL(window.location.href), {{.Nodes}});
+ makeTopTable({{.Total}}, {{.Top}});
+ </script>
+</body>
+</html>
+{{end}}
+
+{{define "sourcelisting" -}}
+<!DOCTYPE html>
+<html>
+<head>
+ <meta charset="utf-8">
+ <title>{{.Title}}</title>
+ {{template "css" .}}
+ {{template "weblistcss" .}}
+ {{template "weblistjs" .}}
+</head>
+<body>
+ {{template "header" .}}
+ <div id="content" class="source">
+ {{.HTMLBody}}
+ </div>
+ {{template "script" .}}
+ <script>viewer(new URL(window.location.href), null);</script>
+</body>
+</html>
+{{end}}
+
+{{define "plaintext" -}}
+<!DOCTYPE html>
+<html>
+<head>
+ <meta charset="utf-8">
+ <title>{{.Title}}</title>
+ {{template "css" .}}
+</head>
+<body>
+ {{template "header" .}}
+ <div id="content">
+ <pre>
+ {{.TextBody}}
+ </pre>
+ </div>
+ {{template "script" .}}
+ <script>viewer(new URL(window.location.href), null);</script>
+</body>
+</html>
+{{end}}
+
+{{define "flamegraph" -}}
+<!DOCTYPE html>
+<html>
+<head>
+ <meta charset="utf-8">
+ <title>{{.Title}}</title>
+ {{template "css" .}}
+ <style type="text/css">{{template "d3flamegraphcss" .}}</style>
+ <style type="text/css">
+ .flamegraph-content {
+ width: 90%;
+ min-width: 80%;
+ margin-left: 5%;
+ }
+ .flamegraph-details {
+ height: 1.2em;
+ width: 90%;
+ min-width: 90%;
+ margin-left: 5%;
+ padding: 15px 0 35px;
+ }
+ </style>
+</head>
+<body>
+ {{template "header" .}}
+ <div id="bodycontainer">
+ <div id="flamegraphdetails" class="flamegraph-details"></div>
+ <div class="flamegraph-content">
+ <div id="chart"></div>
+ </div>
+ </div>
+ {{template "script" .}}
+ <script>viewer(new URL(window.location.href), {{.Nodes}});</script>
+ <script>{{template "d3script" .}}</script>
+ <script>{{template "d3flamegraphscript" .}}</script>
+  <script>
+    // Render the d3 flame graph for this profile and hook the shared
+    // #search box up to flameGraph.search().
+    var data = {{.FlameGraph}};
+
+    var width = document.getElementById('chart').clientWidth;
+
+    var flameGraph = d3.flamegraph()
+      .width(width)
+      .cellHeight(18)
+      .minFrameSize(1)
+      .transitionDuration(750)
+      .transitionEase(d3.easeCubic)
+      .inverted(true)
+      .sort(true)
+      .title('')
+      .tooltip(false)
+      .details(document.getElementById('flamegraphdetails'));
+
+    // <full name> (percentage, value)
+    flameGraph.label((d) => d.data.f + ' (' + d.data.p + ', ' + d.data.l + ')');
+
+    (function(flameGraph) {
+      var oldColorMapper = flameGraph.color();
+      function colorMapper(d) {
+        // Hack to force default color mapper to use 'warm' color scheme by not passing libtype
+        const { data, highlight } = d;
+        return oldColorMapper({ data: { n: data.n }, highlight });
+      }
+
+      flameGraph.color(colorMapper);
+    }(flameGraph));
+
+    d3.select('#chart')
+      .datum(data)
+      .call(flameGraph);
+
+    function clear() {
+      flameGraph.clear();
+    }
+
+    function resetZoom() {
+      flameGraph.resetZoom();
+    }
+
+    // Re-fit the graph to the container width on window resize.
+    window.addEventListener('resize', function() {
+      var width = document.getElementById('chart').clientWidth;
+      var graphs = document.getElementsByClassName('d3-flame-graph');
+      if (graphs.length > 0) {
+        graphs[0].setAttribute('width', width);
+      }
+      flameGraph.width(width);
+      flameGraph.resetZoom();
+    }, true);
+
+    var search = document.getElementById('search');
+    var searchAlarm = null;
+
+    // Highlight frames matching the search box (or clear highlights
+    // when the box is empty).
+    function selectMatching() {
+      searchAlarm = null;
+
+      if (search.value != '') {
+        flameGraph.search(search.value);
+      } else {
+        flameGraph.clear();
+      }
+    }
+
+    function handleSearch() {
+      // Delay expensive processing so a flurry of key strokes is handled once.
+      if (searchAlarm != null) {
+        clearTimeout(searchAlarm);
+      }
+      searchAlarm = setTimeout(selectMatching, 300);
+    }
+
+    search.addEventListener('input', handleSearch);
+  </script>
+</body>
+</html>
+{{end}}
+`))
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/driver/webui.go b/src/cmd/vendor/github.com/google/pprof/internal/driver/webui.go
new file mode 100644
index 0000000..52dc688
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/driver/webui.go
@@ -0,0 +1,460 @@
+// Copyright 2017 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package driver
+
+import (
+ "bytes"
+ "fmt"
+ "html/template"
+ "net"
+ "net/http"
+ gourl "net/url"
+ "os"
+ "os/exec"
+ "strconv"
+ "strings"
+ "time"
+
+ "github.com/google/pprof/internal/graph"
+ "github.com/google/pprof/internal/plugin"
+ "github.com/google/pprof/internal/report"
+ "github.com/google/pprof/profile"
+)
+
+// webInterface holds the state needed for serving a browser based interface.
+type webInterface struct {
+	prof         *profile.Profile   // profile being served
+	options      *plugin.Options    // pprof invocation options
+	help         map[string]string  // help text, keyed by command or config option name
+	templates    *template.Template // parsed HTML page templates
+	settingsFile string             // file holding saved configurations
+}
+
+// makeWebInterface returns a webInterface for the given profile and
+// options, with the page and source-listing templates parsed and the
+// settings file location resolved.
+func makeWebInterface(p *profile.Profile, opt *plugin.Options) (*webInterface, error) {
+	settingsFile, err := settingsFileName()
+	if err != nil {
+		return nil, err
+	}
+	templates := template.New("templategroup")
+	addTemplates(templates)
+	report.AddSourceTemplates(templates)
+	return &webInterface{
+		prof:         p,
+		options:      opt,
+		help:         make(map[string]string),
+		templates:    templates,
+		settingsFile: settingsFile,
+	}, nil
+}
+
+// maxEntries is the maximum number of entries to print for text interfaces.
+const maxEntries = 50
+
+// errorCatcher is a UI that captures errors for reporting to the browser.
+type errorCatcher struct {
+	plugin.UI          // wrapped UI; still receives every message
+	errors []string    // messages recorded by PrintErr, oldest first
+}
+
+// PrintErr records the message for later display in the web UI and
+// forwards it unchanged to the wrapped plugin.UI.
+func (ec *errorCatcher) PrintErr(args ...interface{}) {
+	msg := fmt.Sprintln(args...)
+	ec.errors = append(ec.errors, strings.TrimSuffix(msg, "\n"))
+	ec.UI.PrintErr(args...)
+}
+
+// webArgs contains arguments passed to templates in webhtml.go.
+type webArgs struct {
+	Title       string            // page title (file name plus profile type)
+	Errors      []string          // errors collected while generating the report
+	Total       int64             // total sample value of the report
+	SampleTypes []string          // sample types available in the profile
+	Legend      []string          // legend lines describing the report
+	Help        map[string]string // help text, keyed by command/config name
+	Nodes       []string          // node names, indexed by node id
+	HTMLBody    template.HTML     // pre-rendered HTML (graph svg or source listing)
+	TextBody    string            // plain-text report body
+	Top         []report.TextItem // entries for the top table
+	FlameGraph  template.JS       // flame graph data, injected as a JS value
+	Configs     []configMenuEntry // saved configurations for the menu
+}
+
+// serveWebInterface serves the web UI for profile p at hostport,
+// optionally launching a browser, and then blocks serving HTTP
+// requests until the server returns.
+func serveWebInterface(hostport string, p *profile.Profile, o *plugin.Options, disableBrowser bool) error {
+	host, port, err := getHostAndPort(hostport)
+	if err != nil {
+		return err
+	}
+	interactiveMode = true
+	ui, err := makeWebInterface(p, o)
+	if err != nil {
+		return err
+	}
+	// Collect help text for commands and configuration options so the
+	// templates can display it.
+	for n, c := range pprofCommands {
+		ui.help[n] = c.description
+	}
+	for n, help := range configHelp {
+		ui.help[n] = help
+	}
+	ui.help["details"] = "Show information about the profile and this view"
+	ui.help["graph"] = "Display profile as a directed graph"
+	ui.help["reset"] = "Show the entire profile"
+	ui.help["save_config"] = "Save current settings"
+
+	// Use the server from the options if provided, falling back to the
+	// built-in one.
+	server := o.HTTPServer
+	if server == nil {
+		server = defaultWebServer
+	}
+	args := &plugin.HTTPServerArgs{
+		Hostport: net.JoinHostPort(host, strconv.Itoa(port)),
+		Host:     host,
+		Port:     port,
+		Handlers: map[string]http.Handler{
+			"/":             http.HandlerFunc(ui.dot),
+			"/top":          http.HandlerFunc(ui.top),
+			"/disasm":       http.HandlerFunc(ui.disasm),
+			"/source":       http.HandlerFunc(ui.source),
+			"/peek":         http.HandlerFunc(ui.peek),
+			"/flamegraph":   http.HandlerFunc(ui.flamegraph),
+			"/saveconfig":   http.HandlerFunc(ui.saveConfig),
+			"/deleteconfig": http.HandlerFunc(ui.deleteConfig),
+		},
+	}
+
+	url := "http://" + args.Hostport
+
+	o.UI.Print("Serving web UI on ", url)
+
+	if o.UI.WantBrowser() && !disableBrowser {
+		// Launch asynchronously; openBrowser waits briefly so the
+		// server below has time to start.
+		go openBrowser(url, o)
+	}
+	return server(args)
+}
+
+func getHostAndPort(hostport string) (string, int, error) {
+ host, portStr, err := net.SplitHostPort(hostport)
+ if err != nil {
+ return "", 0, fmt.Errorf("could not split http address: %v", err)
+ }
+ if host == "" {
+ host = "localhost"
+ }
+ var port int
+ if portStr == "" {
+ ln, err := net.Listen("tcp", net.JoinHostPort(host, "0"))
+ if err != nil {
+ return "", 0, fmt.Errorf("could not generate random port: %v", err)
+ }
+ port = ln.Addr().(*net.TCPAddr).Port
+ err = ln.Close()
+ if err != nil {
+ return "", 0, fmt.Errorf("could not generate random port: %v", err)
+ }
+ } else {
+ port, err = strconv.Atoi(portStr)
+ if err != nil {
+ return "", 0, fmt.Errorf("invalid port number: %v", err)
+ }
+ }
+ return host, port, nil
+}
+// defaultWebServer is used when the options do not supply an
+// HTTPServer. It serves the UI under /ui/ (redirecting there from /)
+// and, when bound to a localhost address, rejects non-local clients.
+func defaultWebServer(args *plugin.HTTPServerArgs) error {
+	ln, err := net.Listen("tcp", args.Hostport)
+	if err != nil {
+		return err
+	}
+	isLocal := isLocalhost(args.Host)
+	handler := http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
+		if isLocal {
+			// Only allow local clients
+			host, _, err := net.SplitHostPort(req.RemoteAddr)
+			if err != nil || !isLocalhost(host) {
+				http.Error(w, "permission denied", http.StatusForbidden)
+				return
+			}
+		}
+		h := args.Handlers[req.URL.Path]
+		if h == nil {
+			// Fall back to default behavior
+			h = http.DefaultServeMux
+		}
+		h.ServeHTTP(w, req)
+	})
+
+	// We serve the ui at /ui/ and redirect there from the root. This is done
+	// to surface any problems with serving the ui at a non-root early. See:
+	//
+	//   https://github.com/google/pprof/pull/348
+	mux := http.NewServeMux()
+	mux.Handle("/ui/", http.StripPrefix("/ui", handler))
+	mux.Handle("/", redirectWithQuery("/ui"))
+	s := &http.Server{Handler: mux}
+	return s.Serve(ln)
+}
+
+// redirectWithQuery returns a handler that redirects to path while
+// preserving the query string of the incoming request.
+func redirectWithQuery(path string) http.HandlerFunc {
+	return func(w http.ResponseWriter, r *http.Request) {
+		target := gourl.URL{Path: path, RawQuery: r.URL.RawQuery}
+		http.Redirect(w, r, target.String(), http.StatusTemporaryRedirect)
+	}
+}
+
+// isLocalhost reports whether host names the local machine, in any of
+// the spellings that can appear in addresses or RemoteAddr values.
+func isLocalhost(host string) bool {
+	switch host {
+	case "localhost", "127.0.0.1", "[::1]", "::1":
+		return true
+	}
+	return false
+}
+
+// openBrowser opens url (augmented with the current configuration
+// parameters) in the first browser from browsers() that starts
+// successfully; if none does, the URL is printed for the user.
+func openBrowser(url string, o *plugin.Options) {
+	// Construct URL.
+	baseURL, _ := gourl.Parse(url)
+	current := currentConfig()
+	u, _ := current.makeURL(*baseURL)
+
+	// Give server a little time to get ready.
+	time.Sleep(time.Millisecond * 500)
+
+	for _, b := range browsers() {
+		// Each entry may be a command plus arguments; the URL is
+		// appended as the final argument.
+		args := strings.Split(b, " ")
+		if len(args) == 0 {
+			continue
+		}
+		viewer := exec.Command(args[0], append(args[1:], u.String())...)
+		viewer.Stderr = os.Stderr
+		if err := viewer.Start(); err == nil {
+			return
+		}
+	}
+	// No visualizer succeeded, so just print URL.
+	o.UI.PrintErr(u.String())
+}
+
+// makeReport generates a report for the specified command.
+// If configEditor is not null, it is used to edit the config used for the report.
+// makeReport generates a report for the specified command.
+// If configEditor is not null, it is used to edit the config used for the report.
+// On failure it writes an HTTP error response and returns a nil
+// report, so callers can simply return when rpt == nil.
+func (ui *webInterface) makeReport(w http.ResponseWriter, req *http.Request,
+	cmd []string, configEditor func(*config)) (*report.Report, []string) {
+	cfg := currentConfig()
+	if err := cfg.applyURL(req.URL.Query()); err != nil {
+		http.Error(w, err.Error(), http.StatusBadRequest)
+		ui.options.UI.PrintErr(err)
+		return nil, nil
+	}
+	if configEditor != nil {
+		configEditor(&cfg)
+	}
+	// Capture errors emitted during report generation so they can be
+	// shown in the browser alongside the report.
+	catcher := &errorCatcher{UI: ui.options.UI}
+	options := *ui.options
+	options.UI = catcher
+	_, rpt, err := generateRawReport(ui.prof, cmd, cfg, &options)
+	if err != nil {
+		http.Error(w, err.Error(), http.StatusBadRequest)
+		ui.options.UI.PrintErr(err)
+		return nil, nil
+	}
+	return rpt, catcher.errors
+}
+
+// render generates html using the named template based on the contents of data.
+// render generates html using the named template based on the contents of data.
+// Fields common to all pages (title, errors, totals, legend, help,
+// saved configs) are filled in from rpt/legend before executing tmpl.
+func (ui *webInterface) render(w http.ResponseWriter, req *http.Request, tmpl string,
+	rpt *report.Report, errList, legend []string, data webArgs) {
+	file := getFromLegend(legend, "File: ", "unknown")
+	profile := getFromLegend(legend, "Type: ", "unknown")
+	data.Title = file + " " + profile
+	data.Errors = errList
+	data.Total = rpt.Total()
+	data.SampleTypes = sampleTypes(ui.prof)
+	data.Legend = legend
+	data.Help = ui.help
+	data.Configs = configMenu(ui.settingsFile, *req.URL)
+
+	// Render to a buffer first so a template failure produces a clean
+	// error response instead of a partially-written page.
+	html := &bytes.Buffer{}
+	if err := ui.templates.ExecuteTemplate(html, tmpl, data); err != nil {
+		http.Error(w, "internal template error", http.StatusInternalServerError)
+		ui.options.UI.PrintErr(err)
+		return
+	}
+	w.Header().Set("Content-Type", "text/html")
+	w.Write(html.Bytes())
+}
+
+// dot generates a web page containing an svg diagram.
+func (ui *webInterface) dot(w http.ResponseWriter, req *http.Request) {
+ rpt, errList := ui.makeReport(w, req, []string{"svg"}, nil)
+ if rpt == nil {
+ return // error already reported
+ }
+
+ // Generate dot graph.
+ g, config := report.GetDOT(rpt)
+ legend := config.Labels
+ config.Labels = nil
+ dot := &bytes.Buffer{}
+ graph.ComposeDot(dot, g, &graph.DotAttributes{}, config)
+
+ // Convert to svg.
+ svg, err := dotToSvg(dot.Bytes())
+ if err != nil {
+ http.Error(w, "Could not execute dot; may need to install graphviz.",
+ http.StatusNotImplemented)
+ ui.options.UI.PrintErr("Failed to execute dot. Is Graphviz installed?\n", err)
+ return
+ }
+
+ // Get all node names into an array.
+ nodes := []string{""} // dot starts with node numbered 1
+ for _, n := range g.Nodes {
+ nodes = append(nodes, n.Info.Name)
+ }
+
+ ui.render(w, req, "graph", rpt, errList, legend, webArgs{
+ HTMLBody: template.HTML(string(svg)),
+ Nodes: nodes,
+ })
+}
+
+func dotToSvg(dot []byte) ([]byte, error) {
+ cmd := exec.Command("dot", "-Tsvg")
+ out := &bytes.Buffer{}
+ cmd.Stdin, cmd.Stdout, cmd.Stderr = bytes.NewBuffer(dot), out, os.Stderr
+ if err := cmd.Run(); err != nil {
+ return nil, err
+ }
+
+ // Fix dot bug related to unquoted ampersands.
+ svg := bytes.Replace(out.Bytes(), []byte("&;"), []byte("&amp;;"), -1)
+
+ // Cleanup for embedding by dropping stuff before the <svg> start.
+ if pos := bytes.Index(svg, []byte("<svg")); pos >= 0 {
+ svg = svg[pos:]
+ }
+ return svg, nil
+}
+
+// top generates a web page showing the top entries table, with the
+// node count raised to 500 so the table is reasonably complete.
+func (ui *webInterface) top(w http.ResponseWriter, req *http.Request) {
+	rpt, errList := ui.makeReport(w, req, []string{"top"}, func(cfg *config) {
+		cfg.NodeCount = 500
+	})
+	if rpt == nil {
+		return // error already reported
+	}
+	top, legend := report.TextItems(rpt)
+	// Collect entry names so rows can participate in node selection.
+	var nodes []string
+	for _, item := range top {
+		nodes = append(nodes, item.Name)
+	}
+
+	ui.render(w, req, "top", rpt, errList, legend, webArgs{
+		Top:   top,
+		Nodes: nodes,
+	})
+}
+
+// disasm generates a web page containing disassembly.
+func (ui *webInterface) disasm(w http.ResponseWriter, req *http.Request) {
+ args := []string{"disasm", req.URL.Query().Get("f")}
+ rpt, errList := ui.makeReport(w, req, args, nil)
+ if rpt == nil {
+ return // error already reported
+ }
+
+ out := &bytes.Buffer{}
+ if err := report.PrintAssembly(out, rpt, ui.options.Obj, maxEntries); err != nil {
+ http.Error(w, err.Error(), http.StatusBadRequest)
+ ui.options.UI.PrintErr(err)
+ return
+ }
+
+ legend := report.ProfileLabels(rpt)
+ ui.render(w, req, "plaintext", rpt, errList, legend, webArgs{
+ TextBody: out.String(),
+ })
+
+}
+
+// source generates a web page containing source code annotated with profile
+// data.
+// source generates a web page containing source code annotated with profile
+// data. The "f" query parameter selects the function(s) to list.
+func (ui *webInterface) source(w http.ResponseWriter, req *http.Request) {
+	args := []string{"weblist", req.URL.Query().Get("f")}
+	rpt, errList := ui.makeReport(w, req, args, nil)
+	if rpt == nil {
+		return // error already reported
+	}
+
+	// Generate source listing.
+	var body bytes.Buffer
+	if err := report.PrintWebList(&body, rpt, ui.options.Obj, maxEntries); err != nil {
+		http.Error(w, err.Error(), http.StatusBadRequest)
+		ui.options.UI.PrintErr(err)
+		return
+	}
+
+	legend := report.ProfileLabels(rpt)
+	ui.render(w, req, "sourcelisting", rpt, errList, legend, webArgs{
+		HTMLBody: template.HTML(body.String()),
+	})
+}
+
+// peek generates a web page listing callers/callers.
+func (ui *webInterface) peek(w http.ResponseWriter, req *http.Request) {
+ args := []string{"peek", req.URL.Query().Get("f")}
+ rpt, errList := ui.makeReport(w, req, args, func(cfg *config) {
+ cfg.Granularity = "lines"
+ })
+ if rpt == nil {
+ return // error already reported
+ }
+
+ out := &bytes.Buffer{}
+ if err := report.Generate(out, rpt, ui.options.Obj); err != nil {
+ http.Error(w, err.Error(), http.StatusBadRequest)
+ ui.options.UI.PrintErr(err)
+ return
+ }
+
+ legend := report.ProfileLabels(rpt)
+ ui.render(w, req, "plaintext", rpt, errList, legend, webArgs{
+ TextBody: out.String(),
+ })
+}
+
+// saveConfig saves URL configuration.
+func (ui *webInterface) saveConfig(w http.ResponseWriter, req *http.Request) {
+ if err := setConfig(ui.settingsFile, *req.URL); err != nil {
+ http.Error(w, err.Error(), http.StatusBadRequest)
+ ui.options.UI.PrintErr(err)
+ return
+ }
+}
+
+// deleteConfig deletes a configuration.
+func (ui *webInterface) deleteConfig(w http.ResponseWriter, req *http.Request) {
+ name := req.URL.Query().Get("config")
+ if err := removeConfig(ui.settingsFile, name); err != nil {
+ http.Error(w, err.Error(), http.StatusBadRequest)
+ ui.options.UI.PrintErr(err)
+ return
+ }
+}
+
+// getFromLegend returns the suffix of an entry in legend that starts
+// with param. It returns def if no such entry is found.
+// getFromLegend returns the suffix of the first legend entry that
+// starts with param, or def if no entry has that prefix.
+func getFromLegend(legend []string, param, def string) string {
+	for i := range legend {
+		if entry := legend[i]; strings.HasPrefix(entry, param) {
+			return entry[len(param):]
+		}
+	}
+	return def
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/elfexec/elfexec.go b/src/cmd/vendor/github.com/google/pprof/internal/elfexec/elfexec.go
new file mode 100644
index 0000000..d520765
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/elfexec/elfexec.go
@@ -0,0 +1,285 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package elfexec provides utility routines to examine ELF binaries.
+package elfexec
+
+import (
+ "bufio"
+ "debug/elf"
+ "encoding/binary"
+ "fmt"
+ "io"
+)
+
+const (
+ maxNoteSize = 1 << 20 // in bytes
+ noteTypeGNUBuildID = 3
+)
+
+// elfNote is the payload of a Note Section in an ELF file.
+type elfNote struct {
+ Name string // Contents of the "name" field, omitting the trailing zero byte.
+ Desc []byte // Contents of the "desc" field.
+ Type uint32 // Contents of the "type" field.
+}
+
+// parseNotes returns the notes from a SHT_NOTE section or PT_NOTE segment.
+func parseNotes(reader io.Reader, alignment int, order binary.ByteOrder) ([]elfNote, error) {
+ r := bufio.NewReader(reader)
+
+ // padding returns the number of bytes required to pad the given size to an
+ // alignment boundary.
+ padding := func(size int) int {
+ return ((size + (alignment - 1)) &^ (alignment - 1)) - size
+ }
+
+ var notes []elfNote
+ for {
+ noteHeader := make([]byte, 12) // 3 4-byte words
+ if _, err := io.ReadFull(r, noteHeader); err == io.EOF {
+ break
+ } else if err != nil {
+ return nil, err
+ }
+ namesz := order.Uint32(noteHeader[0:4])
+ descsz := order.Uint32(noteHeader[4:8])
+ typ := order.Uint32(noteHeader[8:12])
+
+ if uint64(namesz) > uint64(maxNoteSize) {
+ return nil, fmt.Errorf("note name too long (%d bytes)", namesz)
+ }
+ var name string
+ if namesz > 0 {
+ // Documentation differs as to whether namesz is meant to include the
+ // trailing zero, but everyone agrees that name is null-terminated.
+ // So we'll just determine the actual length after the fact.
+ var err error
+ name, err = r.ReadString('\x00')
+ if err == io.EOF {
+ return nil, fmt.Errorf("missing note name (want %d bytes)", namesz)
+ } else if err != nil {
+ return nil, err
+ }
+ namesz = uint32(len(name))
+ name = name[:len(name)-1]
+ }
+
+ // Drop padding bytes until the desc field.
+ for n := padding(len(noteHeader) + int(namesz)); n > 0; n-- {
+ if _, err := r.ReadByte(); err == io.EOF {
+ return nil, fmt.Errorf(
+ "missing %d bytes of padding after note name", n)
+ } else if err != nil {
+ return nil, err
+ }
+ }
+
+ if uint64(descsz) > uint64(maxNoteSize) {
+ return nil, fmt.Errorf("note desc too long (%d bytes)", descsz)
+ }
+ desc := make([]byte, int(descsz))
+ if _, err := io.ReadFull(r, desc); err == io.EOF {
+ return nil, fmt.Errorf("missing desc (want %d bytes)", len(desc))
+ } else if err != nil {
+ return nil, err
+ }
+
+ notes = append(notes, elfNote{Name: name, Desc: desc, Type: typ})
+
+ // Drop padding bytes until the next note or the end of the section,
+ // whichever comes first.
+ for n := padding(len(desc)); n > 0; n-- {
+ if _, err := r.ReadByte(); err == io.EOF {
+ // We hit the end of the section before an alignment boundary.
+ // This can happen if this section is at the end of the file or the next
+ // section has a smaller alignment requirement.
+ break
+ } else if err != nil {
+ return nil, err
+ }
+ }
+ }
+ return notes, nil
+}
+
+// GetBuildID returns the GNU build-ID for an ELF binary.
+//
+// If no build-ID was found but the binary was read without error, it returns
+// (nil, nil).
+func GetBuildID(binary io.ReaderAt) ([]byte, error) {
+ f, err := elf.NewFile(binary)
+ if err != nil {
+ return nil, err
+ }
+
+ findBuildID := func(notes []elfNote) ([]byte, error) {
+ var buildID []byte
+ for _, note := range notes {
+ if note.Name == "GNU" && note.Type == noteTypeGNUBuildID {
+ if buildID == nil {
+ buildID = note.Desc
+ } else {
+ return nil, fmt.Errorf("multiple build ids found, don't know which to use")
+ }
+ }
+ }
+ return buildID, nil
+ }
+
+ for _, p := range f.Progs {
+ if p.Type != elf.PT_NOTE {
+ continue
+ }
+ notes, err := parseNotes(p.Open(), int(p.Align), f.ByteOrder)
+ if err != nil {
+ return nil, err
+ }
+ if b, err := findBuildID(notes); b != nil || err != nil {
+ return b, err
+ }
+ }
+ for _, s := range f.Sections {
+ if s.Type != elf.SHT_NOTE {
+ continue
+ }
+ notes, err := parseNotes(s.Open(), int(s.Addralign), f.ByteOrder)
+ if err != nil {
+ return nil, err
+ }
+ if b, err := findBuildID(notes); b != nil || err != nil {
+ return b, err
+ }
+ }
+ return nil, nil
+}
+
// GetBase determines the base address to subtract from virtual
// address to get symbol table address. For an executable, the base
// is 0. Otherwise, it's a shared library, and the base is the
// address where the mapping starts. The kernel is special, and may
// use the address of the _stext symbol as the mmap start. _stext
// offset can be obtained with `nm vmlinux | grep _stext`
//
// NOTE(review): the ET_EXEC branch is a sequence of heuristics whose
// order matters — each case is only reached when the earlier ones did
// not match. Do not reorder.
func GetBase(fh *elf.FileHeader, loadSegment *elf.ProgHeader, stextOffset *uint64, start, limit, offset uint64) (uint64, error) {
	const (
		pageSize = 4096
		// PAGE_OFFSET for PowerPC64, see arch/powerpc/Kconfig in the kernel sources.
		pageOffsetPpc64 = 0xc000000000000000
	)

	if start == 0 && offset == 0 && (limit == ^uint64(0) || limit == 0) {
		// Some tools may introduce a fake mapping that spans the entire
		// address space. Assume that the address has already been
		// adjusted, so no additional base adjustment is necessary.
		return 0, nil
	}

	switch fh.Type {
	case elf.ET_EXEC:
		if loadSegment == nil {
			// Assume fixed-address executable and so no adjustment.
			return 0, nil
		}
		if stextOffset == nil && start > 0 && start < 0x8000000000000000 {
			// A regular user-mode executable. Compute the base offset using same
			// arithmetics as in ET_DYN case below, see the explanation there.
			// Ideally, the condition would just be "stextOffset == nil" as that
			// represents the address of _stext symbol in the vmlinux image. Alas,
			// the caller may skip reading it from the binary (it's expensive to scan
			// all the symbols) and so it may be nil even for the kernel executable.
			// So additionally check that the start is within the user-mode half of
			// the 64-bit address space.
			return start - offset + loadSegment.Off - loadSegment.Vaddr, nil
		}
		// Various kernel heuristics and cases follow.
		if loadSegment.Vaddr == start-offset {
			// Mapping matches the segment layout exactly; the file offset
			// is the base.
			return offset, nil
		}
		if start == 0 && limit != 0 {
			// ChromeOS remaps its kernel to 0. Nothing else should come
			// down this path. Empirical values:
			// VADDR=0xffffffff80200000
			// stextOffset=0xffffffff80200198
			if stextOffset != nil {
				// Unsigned negation: wraps modulo 2^64, giving the
				// additive inverse used as the base.
				return -*stextOffset, nil
			}
			return -loadSegment.Vaddr, nil
		}
		if start >= loadSegment.Vaddr && limit > start && (offset == 0 || offset == pageOffsetPpc64 || offset == start) {
			// Some kernels look like:
			// VADDR=0xffffffff80200000
			// stextOffset=0xffffffff80200198
			// Start=0xffffffff83200000
			// Limit=0xffffffff84200000
			// Offset=0 (0xc000000000000000 for PowerPC64) (== Start for ASLR kernel)
			// So the base should be:
			if stextOffset != nil && (start%pageSize) == (*stextOffset%pageSize) {
				// perf uses the address of _stext as start. Some tools may
				// adjust for this before calling GetBase, in which case the page
				// alignment should be different from that of stextOffset.
				return start - *stextOffset, nil
			}

			return start - loadSegment.Vaddr, nil
		} else if start%pageSize != 0 && stextOffset != nil && *stextOffset%pageSize == start%pageSize {
			// ChromeOS remaps its kernel to 0 + start%pageSize. Nothing
			// else should come down this path. Empirical values:
			// start=0x198 limit=0x2f9fffff offset=0
			// VADDR=0xffffffff81000000
			// stextOffset=0xffffffff81000198
			return start - *stextOffset, nil
		}

		// No heuristic matched; refuse to guess.
		return 0, fmt.Errorf("don't know how to handle EXEC segment: %v start=0x%x limit=0x%x offset=0x%x", *loadSegment, start, limit, offset)
	case elf.ET_REL:
		// Relocatable object: use the mapping start itself as the base.
		if offset != 0 {
			return 0, fmt.Errorf("don't know how to handle mapping.Offset")
		}
		return start, nil
	case elf.ET_DYN:
		// The process mapping information, start = start of virtual address range,
		// and offset = offset in the executable file of the start address, tells us
		// that a runtime virtual address x maps to a file offset
		// fx = x - start + offset.
		if loadSegment == nil {
			return start - offset, nil
		}
		// The program header, if not nil, indicates the offset in the file where
		// the executable segment is located (loadSegment.Off), and the base virtual
		// address where the first byte of the segment is loaded
		// (loadSegment.Vaddr). A file offset fx maps to a virtual (symbol) address
		// sx = fx - loadSegment.Off + loadSegment.Vaddr.
		//
		// Thus, a runtime virtual address x maps to a symbol address
		// sx = x - start + offset - loadSegment.Off + loadSegment.Vaddr.
		return start - offset + loadSegment.Off - loadSegment.Vaddr, nil
	}
	return 0, fmt.Errorf("don't know how to handle FileHeader.Type %v", fh.Type)
}
+
+// FindTextProgHeader finds the program segment header containing the .text
+// section or nil if the segment cannot be found.
+func FindTextProgHeader(f *elf.File) *elf.ProgHeader {
+ for _, s := range f.Sections {
+ if s.Name == ".text" {
+ // Find the LOAD segment containing the .text section.
+ for _, p := range f.Progs {
+ if p.Type == elf.PT_LOAD && p.Flags&elf.PF_X != 0 && s.Addr >= p.Vaddr && s.Addr < p.Vaddr+p.Memsz {
+ return &p.ProgHeader
+ }
+ }
+ }
+ }
+ return nil
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/graph/dotgraph.go b/src/cmd/vendor/github.com/google/pprof/internal/graph/dotgraph.go
new file mode 100644
index 0000000..8cb87da
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/graph/dotgraph.go
@@ -0,0 +1,491 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package graph
+
+import (
+ "fmt"
+ "io"
+ "math"
+ "path/filepath"
+ "strings"
+
+ "github.com/google/pprof/internal/measurement"
+)
+
// DotAttributes contains details about the graph itself, giving
// insight into how its elements should be rendered.
type DotAttributes struct {
	Nodes map[*Node]*DotNodeAttributes // A map allowing each Node to have its own visualization option
}

// DotNodeAttributes contains Node specific visualization options.
type DotNodeAttributes struct {
	Shape       string                 // The optional shape of the node when rendered visually
	Bold        bool                   // If the node should be bold or not
	Peripheries int                    // An optional number of borders to place around a node
	URL         string                 // An optional url link to add to a node
	Formatter   func(*NodeInfo) string // An optional formatter for the node's label; overrides the default name formatting
}

// DotConfig contains attributes about how a graph should be
// constructed and how it should look.
type DotConfig struct {
	Title     string   // The title of the DOT graph
	LegendURL string   // The URL to link to from the legend.
	Labels    []string // The labels for the DOT's legend; the first entry doubles as the legend box's name

	FormatValue func(int64) string // A formatting function for values
	Total       int64              // The total weight of the graph, used to compute percentages
}

// maxNodelets caps how many tag boxes are attached to a node.
const maxNodelets = 4 // Number of nodelets for labels (both numeric and non)
+
+// ComposeDot creates and writes a in the DOT format to the writer, using
+// the configurations given.
+func ComposeDot(w io.Writer, g *Graph, a *DotAttributes, c *DotConfig) {
+ builder := &builder{w, a, c}
+
+ // Begin constructing DOT by adding a title and legend.
+ builder.start()
+ defer builder.finish()
+ builder.addLegend()
+
+ if len(g.Nodes) == 0 {
+ return
+ }
+
+ // Preprocess graph to get id map and find max flat.
+ nodeIDMap := make(map[*Node]int)
+ hasNodelets := make(map[*Node]bool)
+
+ maxFlat := float64(abs64(g.Nodes[0].FlatValue()))
+ for i, n := range g.Nodes {
+ nodeIDMap[n] = i + 1
+ if float64(abs64(n.FlatValue())) > maxFlat {
+ maxFlat = float64(abs64(n.FlatValue()))
+ }
+ }
+
+ edges := EdgeMap{}
+
+ // Add nodes and nodelets to DOT builder.
+ for _, n := range g.Nodes {
+ builder.addNode(n, nodeIDMap[n], maxFlat)
+ hasNodelets[n] = builder.addNodelets(n, nodeIDMap[n])
+
+ // Collect all edges. Use a fake node to support multiple incoming edges.
+ for _, e := range n.Out {
+ edges[&Node{}] = e
+ }
+ }
+
+ // Add edges to DOT builder. Sort edges by frequency as a hint to the graph layout engine.
+ for _, e := range edges.Sort() {
+ builder.addEdge(e, nodeIDMap[e.Src], nodeIDMap[e.Dest], hasNodelets[e.Src])
+ }
+}
+
// builder wraps an io.Writer and understands how to compose DOT formatted elements.
type builder struct {
	io.Writer                 // destination for the generated DOT text
	attributes *DotAttributes // per-node visualization overrides
	config     *DotConfig     // graph-wide title, legend, and formatting options
}
+
+// start generates a title and initial node in DOT format.
+func (b *builder) start() {
+ graphname := "unnamed"
+ if b.config.Title != "" {
+ graphname = b.config.Title
+ }
+ fmt.Fprintln(b, `digraph "`+graphname+`" {`)
+ fmt.Fprintln(b, `node [style=filled fillcolor="#f8f8f8"]`)
+}
+
+// finish closes the opening curly bracket in the constructed DOT buffer.
+func (b *builder) finish() {
+ fmt.Fprintln(b, "}")
+}
+
+// addLegend generates a legend in DOT format.
+func (b *builder) addLegend() {
+ labels := b.config.Labels
+ if len(labels) == 0 {
+ return
+ }
+ title := labels[0]
+ fmt.Fprintf(b, `subgraph cluster_L { "%s" [shape=box fontsize=16`, title)
+ fmt.Fprintf(b, ` label="%s\l"`, strings.Join(escapeAllForDot(labels), `\l`))
+ if b.config.LegendURL != "" {
+ fmt.Fprintf(b, ` URL="%s" target="_blank"`, b.config.LegendURL)
+ }
+ if b.config.Title != "" {
+ fmt.Fprintf(b, ` tooltip="%s"`, b.config.Title)
+ }
+ fmt.Fprintf(b, "] }\n")
+}
+
// addNode generates a graph node in DOT format. The label shows the
// node's flat (self) value and, when different, its cumulative value;
// the font size scales with the node's share of maxFlat.
func (b *builder) addNode(node *Node, nodeID int, maxFlat float64) {
	flat, cum := node.FlatValue(), node.CumValue()
	attrs := b.attributes.Nodes[node]

	// Populate label for node.
	var label string
	if attrs != nil && attrs.Formatter != nil {
		label = attrs.Formatter(&node.Info)
	} else {
		label = multilinePrintableName(&node.Info)
	}

	flatValue := b.config.FormatValue(flat)
	if flat != 0 {
		label = label + fmt.Sprintf(`%s (%s)`,
			flatValue,
			strings.TrimSpace(measurement.Percentage(flat, b.config.Total)))
	} else {
		label = label + "0"
	}
	// cumValue defaults to the flat value so the tooltip below is
	// sensible when cum == flat.
	cumValue := flatValue
	if cum != flat {
		if flat != 0 {
			label = label + `\n`
		} else {
			label = label + " "
		}
		cumValue = b.config.FormatValue(cum)
		label = label + fmt.Sprintf(`of %s (%s)`,
			cumValue,
			strings.TrimSpace(measurement.Percentage(cum, b.config.Total)))
	}

	// Scale font sizes from 8 to 24 based on percentage of flat frequency.
	// Use non linear growth to emphasize the size difference.
	baseFontSize, maxFontGrowth := 8, 16.0
	fontSize := baseFontSize
	if maxFlat != 0 && flat != 0 && float64(abs64(flat)) <= maxFlat {
		fontSize += int(math.Ceil(maxFontGrowth * math.Sqrt(float64(abs64(flat))/maxFlat)))
	}

	// Determine node shape.
	shape := "box"
	if attrs != nil && attrs.Shape != "" {
		shape = attrs.Shape
	}

	// Create DOT attribute for node. Color is derived from the node's
	// cumulative share of the total (foreground and background variants).
	attr := fmt.Sprintf(`label="%s" id="node%d" fontsize=%d shape=%s tooltip="%s (%s)" color="%s" fillcolor="%s"`,
		label, nodeID, fontSize, shape, escapeForDot(node.Info.PrintableName()), cumValue,
		dotColor(float64(node.CumValue())/float64(abs64(b.config.Total)), false),
		dotColor(float64(node.CumValue())/float64(abs64(b.config.Total)), true))

	// Add on extra attributes if provided.
	if attrs != nil {
		// Make bold if specified.
		if attrs.Bold {
			attr += ` style="bold,filled"`
		}

		// Add peripheries if specified.
		if attrs.Peripheries != 0 {
			attr += fmt.Sprintf(` peripheries=%d`, attrs.Peripheries)
		}

		// Add URL if specified. target="_blank" forces the link to open in a new tab.
		if attrs.URL != "" {
			attr += fmt.Sprintf(` URL="%s" target="_blank"`, attrs.URL)
		}
	}

	fmt.Fprintf(b, "N%d [%s]\n", nodeID, attr)
}
+
// addNodelets generates the DOT boxes for the node tags if they exist.
// It returns true if any nodelet output was emitted for this node.
func (b *builder) addNodelets(node *Node, nodeID int) bool {
	var nodelets string

	// Populate two Tag slices, one for LabelTags and one for NumericTags.
	var ts []*Tag
	lnts := make(map[string][]*Tag)
	for _, t := range node.LabelTags {
		ts = append(ts, t)
	}
	for l, tm := range node.NumericTags {
		for _, t := range tm {
			lnts[l] = append(lnts[l], t)
		}
	}

	// For leaf nodes, print cumulative tags (includes weight from
	// children that have been deleted).
	// For internal nodes, print only flat tags.
	flatTags := len(node.Out) > 0

	// Select the top maxNodelets alphanumeric labels by weight.
	SortTags(ts, flatTags)
	if len(ts) > maxNodelets {
		ts = ts[:maxNodelets]
	}
	for i, t := range ts {
		w := t.CumValue()
		if flatTags {
			w = t.FlatValue()
		}
		if w == 0 {
			// Zero-weight tags carry no information; skip them.
			continue
		}
		weight := b.config.FormatValue(w)
		nodelets += fmt.Sprintf(`N%d_%d [label = "%s" id="N%d_%d" fontsize=8 shape=box3d tooltip="%s"]`+"\n", nodeID, i, t.Name, nodeID, i, weight)
		nodelets += fmt.Sprintf(`N%d -> N%d_%d [label=" %s" weight=100 tooltip="%s" labeltooltip="%s"]`+"\n", nodeID, nodeID, i, weight, weight, weight)
		// Numeric tags grouped under this label get their own sub-nodelets.
		if nts := lnts[t.Name]; nts != nil {
			nodelets += b.numericNodelets(nts, maxNodelets, flatTags, fmt.Sprintf(`N%d_%d`, nodeID, i))
		}
	}

	// Numeric tags with no associated label hang off the node itself.
	if nts := lnts[""]; nts != nil {
		nodelets += b.numericNodelets(nts, maxNodelets, flatTags, fmt.Sprintf(`N%d`, nodeID))
	}

	fmt.Fprint(b, nodelets)
	return nodelets != ""
}
+
+func (b *builder) numericNodelets(nts []*Tag, maxNumNodelets int, flatTags bool, source string) string {
+ nodelets := ""
+
+ // Collapse numeric labels into maxNumNodelets buckets, of the form:
+ // 1MB..2MB, 3MB..5MB, ...
+ for j, t := range b.collapsedTags(nts, maxNumNodelets, flatTags) {
+ w, attr := t.CumValue(), ` style="dotted"`
+ if flatTags || t.FlatValue() == t.CumValue() {
+ w, attr = t.FlatValue(), ""
+ }
+ if w != 0 {
+ weight := b.config.FormatValue(w)
+ nodelets += fmt.Sprintf(`N%s_%d [label = "%s" id="N%s_%d" fontsize=8 shape=box3d tooltip="%s"]`+"\n", source, j, t.Name, source, j, weight)
+ nodelets += fmt.Sprintf(`%s -> N%s_%d [label=" %s" weight=100 tooltip="%s" labeltooltip="%s"%s]`+"\n", source, source, j, weight, weight, weight, attr)
+ }
+ }
+ return nodelets
+}
+
+// addEdge generates a graph edge in DOT format.
+func (b *builder) addEdge(edge *Edge, from, to int, hasNodelets bool) {
+ var inline string
+ if edge.Inline {
+ inline = `\n (inline)`
+ }
+ w := b.config.FormatValue(edge.WeightValue())
+ attr := fmt.Sprintf(`label=" %s%s"`, w, inline)
+ if b.config.Total != 0 {
+ // Note: edge.weight > b.config.Total is possible for profile diffs.
+ if weight := 1 + int(min64(abs64(edge.WeightValue()*100/b.config.Total), 100)); weight > 1 {
+ attr = fmt.Sprintf(`%s weight=%d`, attr, weight)
+ }
+ if width := 1 + int(min64(abs64(edge.WeightValue()*5/b.config.Total), 5)); width > 1 {
+ attr = fmt.Sprintf(`%s penwidth=%d`, attr, width)
+ }
+ attr = fmt.Sprintf(`%s color="%s"`, attr,
+ dotColor(float64(edge.WeightValue())/float64(abs64(b.config.Total)), false))
+ }
+ arrow := "->"
+ if edge.Residual {
+ arrow = "..."
+ }
+ tooltip := fmt.Sprintf(`"%s %s %s (%s)"`,
+ escapeForDot(edge.Src.Info.PrintableName()), arrow,
+ escapeForDot(edge.Dest.Info.PrintableName()), w)
+ attr = fmt.Sprintf(`%s tooltip=%s labeltooltip=%s`, attr, tooltip, tooltip)
+
+ if edge.Residual {
+ attr = attr + ` style="dotted"`
+ }
+
+ if hasNodelets {
+ // Separate children further if source has tags.
+ attr = attr + " minlen=2"
+ }
+
+ fmt.Fprintf(b, "N%d -> N%d [%s]\n", from, to, attr)
+}
+
// dotColor returns a color for the given score (between -1.0 and
// 1.0), with -1.0 colored red, 0.0 colored grey, and 1.0 colored
// green. If isBackground is true, then a light (low-saturation)
// color is returned (suitable for use as a background color);
// otherwise, a darker color is returned (suitable for use as a
// foreground color).
func dotColor(score float64, isBackground bool) string {
	// A float between 0.0 and 1.0, indicating the extent to which
	// colors should be shifted away from grey (to make positive and
	// negative values easier to distinguish, and to make more use of
	// the color range.)
	const shift = 0.7

	// Saturation and value (in hsv colorspace), chosen per isBackground.
	saturation, value := 1.0, 0.7 // foreground: vivid and darker
	if isBackground {
		saturation, value = 0.1, 0.93 // background: washed out and light
	}

	// Limit the score values to the range [-1.0, 1.0].
	score = math.Max(-1.0, math.Min(1.0, score))

	// Reduce saturation near score=0 (so it is colored grey, rather than yellow).
	if math.Abs(score) < 0.2 {
		saturation *= math.Abs(score) / 0.2
	}

	// Apply 'shift' to move scores away from 0.0 (grey).
	switch {
	case score > 0.0:
		score = math.Pow(score, 1.0-shift)
	case score < 0.0:
		score = -math.Pow(-score, 1.0-shift)
	}

	// Blend from grey toward green (positive) or red (negative).
	r, g := value, value
	if score < 0.0 {
		r = value * (1 + saturation*score)
	} else {
		g = value * (1 - saturation*score)
	}
	bl := value * (1 - saturation)
	return fmt.Sprintf("#%02x%02x%02x", uint8(r*255.0), uint8(g*255.0), uint8(bl*255.0))
}
+
+func multilinePrintableName(info *NodeInfo) string {
+ infoCopy := *info
+ infoCopy.Name = escapeForDot(ShortenFunctionName(infoCopy.Name))
+ infoCopy.Name = strings.Replace(infoCopy.Name, "::", `\n`, -1)
+ infoCopy.Name = strings.Replace(infoCopy.Name, ".", `\n`, -1)
+ if infoCopy.File != "" {
+ infoCopy.File = filepath.Base(infoCopy.File)
+ }
+ return strings.Join(infoCopy.NameComponents(), `\n`) + `\n`
+}
+
+// collapsedTags trims and sorts a slice of tags.
+func (b *builder) collapsedTags(ts []*Tag, count int, flatTags bool) []*Tag {
+ ts = SortTags(ts, flatTags)
+ if len(ts) <= count {
+ return ts
+ }
+
+ tagGroups := make([][]*Tag, count)
+ for i, t := range (ts)[:count] {
+ tagGroups[i] = []*Tag{t}
+ }
+ for _, t := range (ts)[count:] {
+ g, d := 0, tagDistance(t, tagGroups[0][0])
+ for i := 1; i < count; i++ {
+ if nd := tagDistance(t, tagGroups[i][0]); nd < d {
+ g, d = i, nd
+ }
+ }
+ tagGroups[g] = append(tagGroups[g], t)
+ }
+
+ var nts []*Tag
+ for _, g := range tagGroups {
+ l, w, c := b.tagGroupLabel(g)
+ nts = append(nts, &Tag{
+ Name: l,
+ Flat: w,
+ Cum: c,
+ })
+ }
+ return SortTags(nts, flatTags)
+}
+
+func tagDistance(t, u *Tag) float64 {
+ v, _ := measurement.Scale(u.Value, u.Unit, t.Unit)
+ if v < float64(t.Value) {
+ return float64(t.Value) - v
+ }
+ return v - float64(t.Value)
+}
+
+func (b *builder) tagGroupLabel(g []*Tag) (label string, flat, cum int64) {
+ if len(g) == 1 {
+ t := g[0]
+ return measurement.Label(t.Value, t.Unit), t.FlatValue(), t.CumValue()
+ }
+ min := g[0]
+ max := g[0]
+ df, f := min.FlatDiv, min.Flat
+ dc, c := min.CumDiv, min.Cum
+ for _, t := range g[1:] {
+ if v, _ := measurement.Scale(t.Value, t.Unit, min.Unit); int64(v) < min.Value {
+ min = t
+ }
+ if v, _ := measurement.Scale(t.Value, t.Unit, max.Unit); int64(v) > max.Value {
+ max = t
+ }
+ f += t.Flat
+ df += t.FlatDiv
+ c += t.Cum
+ dc += t.CumDiv
+ }
+ if df != 0 {
+ f = f / df
+ }
+ if dc != 0 {
+ c = c / dc
+ }
+
+ // Tags are not scaled with the selected output unit because tags are often
+ // much smaller than other values which appear, so the range of tag sizes
+ // sometimes would appear to be "0..0" when scaled to the selected output unit.
+ return measurement.Label(min.Value, min.Unit) + ".." + measurement.Label(max.Value, max.Unit), f, c
+}
+
// min64 returns the smaller of two int64 values.
func min64(a, b int64) int64 {
	if b < a {
		return b
	}
	return a
}
+
// escapeAllForDot applies escapeForDot to all strings in the given slice.
func escapeAllForDot(in []string) []string {
	out := make([]string, len(in))
	for i, s := range in {
		out[i] = escapeForDot(s)
	}
	return out
}

// escapeForDot escapes double quotes and backslashes, and replaces Graphviz's
// "center" character (\n) with a left-justified character.
// See https://graphviz.org/doc/info/attrs.html#k:escString for more info.
func escapeForDot(str string) string {
	// Backslashes must be doubled first, before the other replacements
	// introduce new ones.
	escaped := strings.ReplaceAll(str, `\`, `\\`)
	escaped = strings.ReplaceAll(escaped, `"`, `\"`)
	return strings.ReplaceAll(escaped, "\n", `\l`)
}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/graph/graph.go b/src/cmd/vendor/github.com/google/pprof/internal/graph/graph.go
new file mode 100644
index 0000000..74b904c
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/graph/graph.go
@@ -0,0 +1,1170 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package graph collects a set of samples into a directed graph.
+package graph
+
+import (
+ "fmt"
+ "math"
+ "path/filepath"
+ "regexp"
+ "sort"
+ "strconv"
+ "strings"
+
+ "github.com/google/pprof/profile"
+)
+
var (
	// Removes package name and method arguments for Java method names.
	// See tests for examples.
	javaRegExp = regexp.MustCompile(`^(?:[a-z]\w*\.)*([A-Z][\w\$]*\.(?:<init>|[a-z][\w\$]*(?:\$\d+)?))(?:(?:\()|$)`)
	// Removes package name and method arguments for Go function names.
	// See tests for examples.
	goRegExp = regexp.MustCompile(`^(?:[\w\-\.]+\/)+(.+)`)
	// Removes potential module versions in a package path (e.g. "/v2", "/v10").
	goVerRegExp = regexp.MustCompile(`^(.*?)/v(?:[2-9]|[1-9][0-9]+)([./].*)$`)
	// Strips C++ namespace prefix from a C++ function / method name.
	// NOTE: Make sure to keep the template parameters in the name. Normally,
	// template parameters are stripped from the C++ names but when
	// -symbolize=demangle=templates flag is used, they will not be.
	// See tests for examples.
	cppRegExp = regexp.MustCompile(`^(?:[_a-zA-Z]\w*::)+(_*[A-Z]\w*::~?[_a-zA-Z]\w*(?:<.*>)?)`)
	// Strips the "(anonymous namespace)::" prefix emitted for C++ symbols
	// declared in an anonymous namespace.
	cppAnonymousPrefixRegExp = regexp.MustCompile(`^\(anonymous namespace\)::`)
)
+
// Graph summarizes a performance profile into a format that is
// suitable for visualization.
type Graph struct {
	// Nodes is the set of graph nodes; its order is established by the
	// Sort/SortNodes methods.
	Nodes Nodes
}

// Options encodes the options for constructing a graph
type Options struct {
	SampleValue       func(s []int64) int64      // Function to compute the value of a sample
	SampleMeanDivisor func(s []int64) int64      // Function to compute the divisor for mean graphs, or nil
	FormatTag         func(int64, string) string // Function to format a sample tag value into a string
	ObjNames          bool                       // Always preserve obj filename
	OrigFnNames       bool                       // Preserve original (eg mangled) function names

	CallTree     bool // Build a tree instead of a graph
	DropNegative bool // Drop nodes with overall negative values

	KeptNodes NodeSet // If non-nil, only use nodes in this set
}

// Nodes is an ordered collection of graph nodes.
type Nodes []*Node
+
// Node is an entry on a profiling report. It represents a unique
// program location.
type Node struct {
	// Info describes the source location associated to this node.
	Info NodeInfo

	// Function represents the function that this node belongs to. On
	// graphs with sub-function resolution (eg line number or
	// addresses), two nodes in a NodeMap that are part of the same
	// function have the same value of Node.Function. If the Node
	// represents the whole function, it points back to itself.
	Function *Node

	// Values associated to this node. Flat is exclusive to this node,
	// Cum includes all descendants. The *Div fields are divisors used
	// to compute means (see FlatValue/CumValue).
	Flat, FlatDiv, Cum, CumDiv int64

	// In and Out contain the nodes immediately reaching or reached by
	// this node.
	In, Out EdgeMap

	// LabelTags provide additional information about subsets of a sample.
	LabelTags TagMap

	// NumericTags provide additional values for subsets of a sample.
	// Numeric tags are optionally associated to a label tag. The key
	// for NumericTags is the name of the LabelTag they are associated
	// to, or "" for numeric tags not associated to a label tag.
	NumericTags map[string]TagMap
}
+
+// FlatValue returns the exclusive value for this node, computing the
+// mean if a divisor is available.
+func (n *Node) FlatValue() int64 {
+ if n.FlatDiv == 0 {
+ return n.Flat
+ }
+ return n.Flat / n.FlatDiv
+}
+
+// CumValue returns the inclusive value for this node, computing the
+// mean if a divisor is available.
+func (n *Node) CumValue() int64 {
+ if n.CumDiv == 0 {
+ return n.Cum
+ }
+ return n.Cum / n.CumDiv
+}
+
+// AddToEdge increases the weight of an edge between two nodes. If
+// there isn't such an edge one is created.
+func (n *Node) AddToEdge(to *Node, v int64, residual, inline bool) {
+ n.AddToEdgeDiv(to, 0, v, residual, inline)
+}
+
+// AddToEdgeDiv increases the weight of an edge between two nodes. If
+// there isn't such an edge one is created.
+func (n *Node) AddToEdgeDiv(to *Node, dv, v int64, residual, inline bool) {
+ if n.Out[to] != to.In[n] {
+ panic(fmt.Errorf("asymmetric edges %v %v", *n, *to))
+ }
+
+ if e := n.Out[to]; e != nil {
+ e.WeightDiv += dv
+ e.Weight += v
+ if residual {
+ e.Residual = true
+ }
+ if !inline {
+ e.Inline = false
+ }
+ return
+ }
+
+ info := &Edge{Src: n, Dest: to, WeightDiv: dv, Weight: v, Residual: residual, Inline: inline}
+ n.Out[to] = info
+ to.In[n] = info
+}
+
// NodeInfo contains the attributes for a node.
type NodeInfo struct {
	Name              string // Function name from the profile's symbol info
	OrigName          string // Function.SystemName, kept only when Options.OrigFnNames is set
	Address           uint64 // Instruction address; 0 when not tracked
	File              string // Cleaned source file path; "" when unknown
	StartLine, Lineno int    // Function start line and sampled line number
	Objfile           string // Mapped binary file; set when ObjNames or no name is available
}
+
+// PrintableName calls the Node's Formatter function with a single space separator.
+func (i *NodeInfo) PrintableName() string {
+ return strings.Join(i.NameComponents(), " ")
+}
+
+// NameComponents returns the components of the printable name to be used for a node.
+func (i *NodeInfo) NameComponents() []string {
+ var name []string
+ if i.Address != 0 {
+ name = append(name, fmt.Sprintf("%016x", i.Address))
+ }
+ if fun := i.Name; fun != "" {
+ name = append(name, fun)
+ }
+
+ switch {
+ case i.Lineno != 0:
+ // User requested line numbers, provide what we have.
+ name = append(name, fmt.Sprintf("%s:%d", i.File, i.Lineno))
+ case i.File != "":
+ // User requested file name, provide it.
+ name = append(name, i.File)
+ case i.Name != "":
+ // User requested function name. It was already included.
+ case i.Objfile != "":
+ // Only binary name is available
+ name = append(name, "["+filepath.Base(i.Objfile)+"]")
+ default:
+ // Do not leave it empty if there is no information at all.
+ name = append(name, "<unknown>")
+ }
+ return name
+}
+
// NodeMap maps from a node info struct to a node. It is used to merge
// report entries with the same info.
type NodeMap map[NodeInfo]*Node

// NodeSet is a collection of node info structs, used to select which
// nodes to keep when rebuilding a graph.
type NodeSet map[NodeInfo]bool

// NodePtrSet is a collection of nodes. Trimming a graph or tree requires a set
// of objects which uniquely identify the nodes to keep. In a graph, NodeInfo
// works as a unique identifier; however, in a tree multiple nodes may share
// identical NodeInfos. A *Node does uniquely identify a node so we can use that
// instead. Though a *Node also uniquely identifies a node in a graph,
// currently, during trimming, graphs are rebuilt from scratch using only the
// NodeSet, so there would not be the required context of the initial graph to
// allow for the use of *Node.
type NodePtrSet map[*Node]bool
+
+// FindOrInsertNode takes the info for a node and either returns a matching node
+// from the node map if one exists, or adds one to the map if one does not.
+// If kept is non-nil, nodes are only added if they can be located on it.
+func (nm NodeMap) FindOrInsertNode(info NodeInfo, kept NodeSet) *Node {
+ if kept != nil {
+ if _, ok := kept[info]; !ok {
+ return nil
+ }
+ }
+
+ if n, ok := nm[info]; ok {
+ return n
+ }
+
+ n := &Node{
+ Info: info,
+ In: make(EdgeMap),
+ Out: make(EdgeMap),
+ LabelTags: make(TagMap),
+ NumericTags: make(map[string]TagMap),
+ }
+ nm[info] = n
+ if info.Address == 0 && info.Lineno == 0 {
+ // This node represents the whole function, so point Function
+ // back to itself.
+ n.Function = n
+ return n
+ }
+ // Find a node that represents the whole function.
+ info.Address = 0
+ info.Lineno = 0
+ n.Function = nm.FindOrInsertNode(info, nil)
+ return n
+}
+
// EdgeMap is used to represent the incoming/outgoing edges from a node,
// keyed by the node at the other end of the edge.
type EdgeMap map[*Node]*Edge

// Edge contains any attributes to be represented about edges in a graph.
type Edge struct {
	Src, Dest *Node
	// The summary weight of the edge, plus the divisor used for mean
	// computation (see WeightValue).
	Weight, WeightDiv int64

	// Residual edges connect nodes that were connected through a
	// separate node, which has been removed from the report.
	Residual bool
	// An inline edge represents a call that was inlined into the caller.
	Inline bool
}
+
+// WeightValue returns the weight value for this edge, normalizing if a
+// divisor is available.
+func (e *Edge) WeightValue() int64 {
+ if e.WeightDiv == 0 {
+ return e.Weight
+ }
+ return e.Weight / e.WeightDiv
+}
+
// Tag represents a sample annotation.
type Tag struct {
	Name          string
	Unit          string // Describes the value, "" for non-numeric tags
	Value         int64
	Flat, FlatDiv int64
	Cum, CumDiv   int64
}

// FlatValue returns the exclusive value for this tag, computing the
// mean if a divisor is available.
func (t *Tag) FlatValue() int64 {
	if t.FlatDiv == 0 {
		return t.Flat
	}
	return t.Flat / t.FlatDiv
}

// CumValue returns the inclusive value for this tag, computing the
// mean if a divisor is available.
func (t *Tag) CumValue() int64 {
	if t.CumDiv == 0 {
		return t.Cum
	}
	return t.Cum / t.CumDiv
}

// TagMap is a collection of tags, classified by their name.
type TagMap map[string]*Tag
+
+// SortTags sorts a slice of tags based on their weight.
+func SortTags(t []*Tag, flat bool) []*Tag {
+ ts := tags{t, flat}
+ sort.Sort(ts)
+ return ts.t
+}
+
+// New summarizes performance data from a profile into a graph.
+func New(prof *profile.Profile, o *Options) *Graph {
+ if o.CallTree {
+ return newTree(prof, o)
+ }
+ g, _ := newGraph(prof, o)
+ return g
+}
+
// newGraph computes a graph from a profile. It returns the graph, and
// a map from the profile location indices to the corresponding graph
// nodes.
func newGraph(prof *profile.Profile, o *Options) (*Graph, map[uint64]Nodes) {
	nodes, locationMap := CreateNodes(prof, o)
	// seenNode/seenEdge are reused across samples (cleared rather than
	// reallocated) to avoid a map allocation per sample.
	seenNode := make(map[*Node]bool)
	seenEdge := make(map[nodePair]bool)
	for _, sample := range prof.Sample {
		var w, dw int64
		w = o.SampleValue(sample.Value)
		if o.SampleMeanDivisor != nil {
			dw = o.SampleMeanDivisor(sample.Value)
		}
		// Skip samples that contribute nothing.
		if dw == 0 && w == 0 {
			continue
		}
		for k := range seenNode {
			delete(seenNode, k)
		}
		for k := range seenEdge {
			delete(seenEdge, k)
		}
		var parent *Node
		// A residual edge goes over one or more nodes that were not kept.
		residual := false

		labels := joinLabels(sample)
		// Group the sample frames, based on a global map.
		// Iterate from the end so the last-processed frame (the one that
		// receives the flat weight below) is sample.Location[0].
		for i := len(sample.Location) - 1; i >= 0; i-- {
			l := sample.Location[i]
			locNodes := locationMap[l.ID]
			for ni := len(locNodes) - 1; ni >= 0; ni-- {
				n := locNodes[ni]
				if n == nil {
					// Dropped node: the next kept edge becomes residual.
					residual = true
					continue
				}
				// Add cum weight to all nodes in stack, avoiding double counting.
				if _, ok := seenNode[n]; !ok {
					seenNode[n] = true
					n.addSample(dw, w, labels, sample.NumLabel, sample.NumUnit, o.FormatTag, false)
				}
				// Update edge weights for all edges in stack, avoiding double counting.
				// ni != len(locNodes)-1 marks edges between lines inlined into
				// the same location.
				if _, ok := seenEdge[nodePair{n, parent}]; !ok && parent != nil && n != parent {
					seenEdge[nodePair{n, parent}] = true
					parent.AddToEdgeDiv(n, dw, w, residual, ni != len(locNodes)-1)
				}
				parent = n
				residual = false
			}
		}
		if parent != nil && !residual {
			// Add flat weight to leaf node.
			parent.addSample(dw, w, labels, sample.NumLabel, sample.NumUnit, o.FormatTag, true)
		}
	}

	return selectNodesForGraph(nodes, o.DropNegative), locationMap
}
+
+func selectNodesForGraph(nodes Nodes, dropNegative bool) *Graph {
+ // Collect nodes into a graph.
+ gNodes := make(Nodes, 0, len(nodes))
+ for _, n := range nodes {
+ if n == nil {
+ continue
+ }
+ if n.Cum == 0 && n.Flat == 0 {
+ continue
+ }
+ if dropNegative && isNegative(n) {
+ continue
+ }
+ gNodes = append(gNodes, n)
+ }
+ return &Graph{gNodes}
+}
+
+type nodePair struct {
+ src, dest *Node
+}
+
+func newTree(prof *profile.Profile, o *Options) (g *Graph) {
+ parentNodeMap := make(map[*Node]NodeMap, len(prof.Sample))
+ for _, sample := range prof.Sample {
+ var w, dw int64
+ w = o.SampleValue(sample.Value)
+ if o.SampleMeanDivisor != nil {
+ dw = o.SampleMeanDivisor(sample.Value)
+ }
+ if dw == 0 && w == 0 {
+ continue
+ }
+ var parent *Node
+ labels := joinLabels(sample)
+ // Group the sample frames, based on a per-node map.
+ for i := len(sample.Location) - 1; i >= 0; i-- {
+ l := sample.Location[i]
+ lines := l.Line
+ if len(lines) == 0 {
+ lines = []profile.Line{{}} // Create empty line to include location info.
+ }
+ for lidx := len(lines) - 1; lidx >= 0; lidx-- {
+ nodeMap := parentNodeMap[parent]
+ if nodeMap == nil {
+ nodeMap = make(NodeMap)
+ parentNodeMap[parent] = nodeMap
+ }
+ n := nodeMap.findOrInsertLine(l, lines[lidx], o)
+ if n == nil {
+ continue
+ }
+ n.addSample(dw, w, labels, sample.NumLabel, sample.NumUnit, o.FormatTag, false)
+ if parent != nil {
+ parent.AddToEdgeDiv(n, dw, w, false, lidx != len(lines)-1)
+ }
+ parent = n
+ }
+ }
+ if parent != nil {
+ parent.addSample(dw, w, labels, sample.NumLabel, sample.NumUnit, o.FormatTag, true)
+ }
+ }
+
+ nodes := make(Nodes, len(prof.Location))
+ for _, nm := range parentNodeMap {
+ nodes = append(nodes, nm.nodes()...)
+ }
+ return selectNodesForGraph(nodes, o.DropNegative)
+}
+
+// ShortenFunctionName returns a shortened version of a function's name.
+func ShortenFunctionName(f string) string {
+ f = cppAnonymousPrefixRegExp.ReplaceAllString(f, "")
+ f = goVerRegExp.ReplaceAllString(f, `${1}${2}`)
+ for _, re := range []*regexp.Regexp{goRegExp, javaRegExp, cppRegExp} {
+ if matches := re.FindStringSubmatch(f); len(matches) >= 2 {
+ return strings.Join(matches[1:], "")
+ }
+ }
+ return f
+}
+
// TrimTree trims a Graph in forest form, keeping only the nodes in kept. This
// will not work correctly if even a single node has multiple parents.
// Dropped nodes are spliced out: their children are re-parented onto the
// dropped node's parent via residual edges.
func (g *Graph) TrimTree(kept NodePtrSet) {
	// Creates a new list of nodes
	oldNodes := g.Nodes
	g.Nodes = make(Nodes, 0, len(kept))

	for _, cur := range oldNodes {
		// A node may not have multiple parents
		if len(cur.In) > 1 {
			panic("TrimTree only works on trees")
		}

		// If a node should be kept, add it to the new list of nodes
		if _, ok := kept[cur]; ok {
			g.Nodes = append(g.Nodes, cur)
			continue
		}

		// If a node has no parents, then delete all of the in edges of its
		// children to make them each roots of their own trees.
		if len(cur.In) == 0 {
			for _, outEdge := range cur.Out {
				delete(outEdge.Dest.In, cur)
			}
			continue
		}

		// Get the parent. This works since at this point cur.In must contain only
		// one element.
		if len(cur.In) != 1 {
			panic("Get parent assertion failed. cur.In expected to be of length 1.")
		}
		var parent *Node
		for _, edge := range cur.In {
			parent = edge.Src
		}

		// Remember whether the parent->cur edge was inline before removing it.
		parentEdgeInline := parent.Out[cur].Inline

		// Remove the edge from the parent to this node
		delete(parent.Out, cur)

		// Reconfigure every edge from the current node to now begin at the parent.
		for _, outEdge := range cur.Out {
			child := outEdge.Dest

			delete(child.In, cur)
			child.In[parent] = outEdge
			parent.Out[child] = outEdge

			outEdge.Src = parent
			outEdge.Residual = true
			// If the edge from the parent to the current node and the edge from the
			// current node to the child are both inline, then this resulting residual
			// edge should also be inline
			outEdge.Inline = parentEdgeInline && outEdge.Inline
		}
	}
	// Splicing may have produced redundant residual edges; clean them up.
	g.RemoveRedundantEdges()
}
+
+func joinLabels(s *profile.Sample) string {
+ if len(s.Label) == 0 {
+ return ""
+ }
+
+ var labels []string
+ for key, vals := range s.Label {
+ for _, v := range vals {
+ labels = append(labels, key+":"+v)
+ }
+ }
+ sort.Strings(labels)
+ return strings.Join(labels, `\n`)
+}
+
+// isNegative returns true if the node is considered as "negative" for the
+// purposes of drop_negative.
+func isNegative(n *Node) bool {
+ switch {
+ case n.Flat < 0:
+ return true
+ case n.Flat == 0 && n.Cum < 0:
+ return true
+ default:
+ return false
+ }
+}
+
+// CreateNodes creates graph nodes for all locations in a profile. It
+// returns set of all nodes, plus a mapping of each location to the
+// set of corresponding nodes (one per location.Line).
+func CreateNodes(prof *profile.Profile, o *Options) (Nodes, map[uint64]Nodes) {
+ locations := make(map[uint64]Nodes, len(prof.Location))
+ nm := make(NodeMap, len(prof.Location))
+ for _, l := range prof.Location {
+ lines := l.Line
+ if len(lines) == 0 {
+ lines = []profile.Line{{}} // Create empty line to include location info.
+ }
+ nodes := make(Nodes, len(lines))
+ for ln := range lines {
+ nodes[ln] = nm.findOrInsertLine(l, lines[ln], o)
+ }
+ locations[l.ID] = nodes
+ }
+ return nm.nodes(), locations
+}
+
+func (nm NodeMap) nodes() Nodes {
+ nodes := make(Nodes, 0, len(nm))
+ for _, n := range nm {
+ nodes = append(nodes, n)
+ }
+ return nodes
+}
+
+func (nm NodeMap) findOrInsertLine(l *profile.Location, li profile.Line, o *Options) *Node {
+ var objfile string
+ if m := l.Mapping; m != nil && m.File != "" {
+ objfile = m.File
+ }
+
+ if ni := nodeInfo(l, li, objfile, o); ni != nil {
+ return nm.FindOrInsertNode(*ni, o.KeptNodes)
+ }
+ return nil
+}
+
+func nodeInfo(l *profile.Location, line profile.Line, objfile string, o *Options) *NodeInfo {
+ if line.Function == nil {
+ return &NodeInfo{Address: l.Address, Objfile: objfile}
+ }
+ ni := &NodeInfo{
+ Address: l.Address,
+ Lineno: int(line.Line),
+ Name: line.Function.Name,
+ }
+ if fname := line.Function.Filename; fname != "" {
+ ni.File = filepath.Clean(fname)
+ }
+ if o.OrigFnNames {
+ ni.OrigName = line.Function.SystemName
+ }
+ if o.ObjNames || (ni.Name == "" && ni.OrigName == "") {
+ ni.Objfile = objfile
+ ni.StartLine = int(line.Function.StartLine)
+ }
+ return ni
+}
+
// tags adapts a []*Tag to sort.Interface. When flat is false, cumulative
// weight takes priority over flat weight in the ordering.
type tags struct {
	t    []*Tag
	flat bool
}

func (t tags) Len() int      { return len(t.t) }
func (t tags) Swap(i, j int) { t.t[i], t.t[j] = t.t[j], t.t[i] }

// Less orders by decreasing absolute weight (cum first unless flat-only
// was requested, then flat), breaking remaining ties by name.
func (t tags) Less(i, j int) bool {
	if !t.flat {
		if t.t[i].Cum != t.t[j].Cum {
			return abs64(t.t[i].Cum) > abs64(t.t[j].Cum)
		}
	}
	if t.t[i].Flat != t.t[j].Flat {
		return abs64(t.t[i].Flat) > abs64(t.t[j].Flat)
	}
	return t.t[i].Name < t.t[j].Name
}
+
+// Sum adds the flat and cum values of a set of nodes.
+func (ns Nodes) Sum() (flat int64, cum int64) {
+ for _, n := range ns {
+ flat += n.Flat
+ cum += n.Cum
+ }
+ return
+}
+
// addSample accumulates one sample's weight (w) and mean divisor (dw)
// into the node — as flat weight when flat is true (leaf of the stack),
// otherwise as cumulative weight — and records the sample's string and
// numeric labels as tags on the node.
//
// labels is the pre-joined string-label key (see joinLabels); numLabel
// and numUnit come straight from the profile sample. format, when nil,
// defaults to a plain decimal rendering of the value.
func (n *Node) addSample(dw, w int64, labels string, numLabel map[string][]int64, numUnit map[string][]string, format func(int64, string) string, flat bool) {
	// Update sample value
	if flat {
		n.FlatDiv += dw
		n.Flat += w
	} else {
		n.CumDiv += dw
		n.Cum += w
	}

	// Add string tags
	if labels != "" {
		t := n.LabelTags.findOrAddTag(labels, "", 0)
		if flat {
			t.FlatDiv += dw
			t.Flat += w
		} else {
			t.CumDiv += dw
			t.Cum += w
		}
	}

	// Numeric tags are grouped under the sample's string-label key
	// ("" when the sample has no string labels).
	numericTags := n.NumericTags[labels]
	if numericTags == nil {
		numericTags = TagMap{}
		n.NumericTags[labels] = numericTags
	}
	// Add numeric tags
	if format == nil {
		format = defaultLabelFormat
	}
	for k, nvals := range numLabel {
		units := numUnit[k]
		for i, v := range nvals {
			var t *Tag
			// NOTE(review): when units is non-empty, units[i] is assumed to
			// parallel nvals[i] — confirm the profile decoder guarantees
			// equal lengths, otherwise this indexes out of range.
			if len(units) > 0 {
				t = numericTags.findOrAddTag(format(v, units[i]), units[i], v)
			} else {
				t = numericTags.findOrAddTag(format(v, k), k, v)
			}
			if flat {
				t.FlatDiv += dw
				t.Flat += w
			} else {
				t.CumDiv += dw
				t.Cum += w
			}
		}
	}
}
+
// defaultLabelFormat renders a numeric tag value as plain base-10 text.
// The key is accepted to satisfy the FormatTag signature but is unused.
func defaultLabelFormat(v int64, key string) string {
	return fmt.Sprintf("%d", v)
}
+
+func (m TagMap) findOrAddTag(label, unit string, value int64) *Tag {
+ l := m[label]
+ if l == nil {
+ l = &Tag{
+ Name: label,
+ Unit: unit,
+ Value: value,
+ }
+ m[label] = l
+ }
+ return l
+}
+
// String returns a text representation of a graph, for debugging purposes.
// Nodes are numbered 1..len(g.Nodes) in slice order; each output line lists
// a node's flat/cum weights plus the indices of its predecessors and
// successors. Edge listing follows Go map iteration, so it is not
// deterministic between runs.
func (g *Graph) String() string {
	var s []string

	nodeIndex := make(map[*Node]int, len(g.Nodes))

	for i, n := range g.Nodes {
		nodeIndex[n] = i + 1
	}

	for i, n := range g.Nodes {
		name := n.Info.PrintableName()
		var in, out []int

		for _, from := range n.In {
			in = append(in, nodeIndex[from.Src])
		}
		for _, to := range n.Out {
			out = append(out, nodeIndex[to.Dest])
		}
		// NOTE(review): %x prints the predecessor indices in hex while %v
		// prints the successors in decimal — looks unintentional, but this
		// is debug-only output.
		s = append(s, fmt.Sprintf("%d: %s[flat=%d cum=%d] %x -> %v ", i+1, name, n.Flat, n.Cum, in, out))
	}
	return strings.Join(s, "\n")
}
+
+// DiscardLowFrequencyNodes returns a set of the nodes at or over a
+// specific cum value cutoff.
+func (g *Graph) DiscardLowFrequencyNodes(nodeCutoff int64) NodeSet {
+ return makeNodeSet(g.Nodes, nodeCutoff)
+}
+
+// DiscardLowFrequencyNodePtrs returns a NodePtrSet of nodes at or over a
+// specific cum value cutoff.
+func (g *Graph) DiscardLowFrequencyNodePtrs(nodeCutoff int64) NodePtrSet {
+ cutNodes := getNodesAboveCumCutoff(g.Nodes, nodeCutoff)
+ kept := make(NodePtrSet, len(cutNodes))
+ for _, n := range cutNodes {
+ kept[n] = true
+ }
+ return kept
+}
+
+func makeNodeSet(nodes Nodes, nodeCutoff int64) NodeSet {
+ cutNodes := getNodesAboveCumCutoff(nodes, nodeCutoff)
+ kept := make(NodeSet, len(cutNodes))
+ for _, n := range cutNodes {
+ kept[n.Info] = true
+ }
+ return kept
+}
+
+// getNodesAboveCumCutoff returns all the nodes which have a Cum value greater
+// than or equal to cutoff.
+func getNodesAboveCumCutoff(nodes Nodes, nodeCutoff int64) Nodes {
+ cutoffNodes := make(Nodes, 0, len(nodes))
+ for _, n := range nodes {
+ if abs64(n.Cum) < nodeCutoff {
+ continue
+ }
+ cutoffNodes = append(cutoffNodes, n)
+ }
+ return cutoffNodes
+}
+
+// TrimLowFrequencyTags removes tags that have less than
+// the specified weight.
+func (g *Graph) TrimLowFrequencyTags(tagCutoff int64) {
+ // Remove nodes with value <= total*nodeFraction
+ for _, n := range g.Nodes {
+ n.LabelTags = trimLowFreqTags(n.LabelTags, tagCutoff)
+ for s, nt := range n.NumericTags {
+ n.NumericTags[s] = trimLowFreqTags(nt, tagCutoff)
+ }
+ }
+}
+
+func trimLowFreqTags(tags TagMap, minValue int64) TagMap {
+ kept := TagMap{}
+ for s, t := range tags {
+ if abs64(t.Flat) >= minValue || abs64(t.Cum) >= minValue {
+ kept[s] = t
+ }
+ }
+ return kept
+}
+
+// TrimLowFrequencyEdges removes edges that have less than
+// the specified weight. Returns the number of edges removed
+func (g *Graph) TrimLowFrequencyEdges(edgeCutoff int64) int {
+ var droppedEdges int
+ for _, n := range g.Nodes {
+ for src, e := range n.In {
+ if abs64(e.Weight) < edgeCutoff {
+ delete(n.In, src)
+ delete(src.Out, n)
+ droppedEdges++
+ }
+ }
+ }
+ return droppedEdges
+}
+
+// SortNodes sorts the nodes in a graph based on a specific heuristic.
+func (g *Graph) SortNodes(cum bool, visualMode bool) {
+ // Sort nodes based on requested mode
+ switch {
+ case visualMode:
+ // Specialized sort to produce a more visually-interesting graph
+ g.Nodes.Sort(EntropyOrder)
+ case cum:
+ g.Nodes.Sort(CumNameOrder)
+ default:
+ g.Nodes.Sort(FlatNameOrder)
+ }
+}
+
+// SelectTopNodePtrs returns a set of the top maxNodes *Node in a graph.
+func (g *Graph) SelectTopNodePtrs(maxNodes int, visualMode bool) NodePtrSet {
+ set := make(NodePtrSet)
+ for _, node := range g.selectTopNodes(maxNodes, visualMode) {
+ set[node] = true
+ }
+ return set
+}
+
+// SelectTopNodes returns a set of the top maxNodes nodes in a graph.
+func (g *Graph) SelectTopNodes(maxNodes int, visualMode bool) NodeSet {
+ return makeNodeSet(g.selectTopNodes(maxNodes, visualMode), 0)
+}
+
+// selectTopNodes returns a slice of the top maxNodes nodes in a graph.
+func (g *Graph) selectTopNodes(maxNodes int, visualMode bool) Nodes {
+ if maxNodes > 0 {
+ if visualMode {
+ var count int
+ // If generating a visual graph, count tags as nodes. Update
+ // maxNodes to account for them.
+ for i, n := range g.Nodes {
+ tags := countTags(n)
+ if tags > maxNodelets {
+ tags = maxNodelets
+ }
+ if count += tags + 1; count >= maxNodes {
+ maxNodes = i + 1
+ break
+ }
+ }
+ }
+ }
+ if maxNodes > len(g.Nodes) {
+ maxNodes = len(g.Nodes)
+ }
+ return g.Nodes[:maxNodes]
+}
+
+// countTags counts the tags with flat count. This underestimates the
+// number of tags being displayed, but in practice is close enough.
+func countTags(n *Node) int {
+ count := 0
+ for _, e := range n.LabelTags {
+ if e.Flat != 0 {
+ count++
+ }
+ }
+ for _, t := range n.NumericTags {
+ for _, e := range t {
+ if e.Flat != 0 {
+ count++
+ }
+ }
+ }
+ return count
+}
+
// RemoveRedundantEdges removes residual edges if the destination can
// be reached through another path. This is done to simplify the graph
// while preserving connectivity.
func (g *Graph) RemoveRedundantEdges() {
	// Walk the nodes and outgoing edges in reverse order to prefer
	// removing edges with the lowest weight.
	for i := len(g.Nodes); i > 0; i-- {
		n := g.Nodes[i-1]
		// In.Sort() orders heaviest-first, so the reverse walk below
		// visits the lightest incoming edges first.
		in := n.In.Sort()
		for j := len(in); j > 0; j-- {
			e := in[j-1]
			if !e.Residual {
				// Do not remove edges heavier than a non-residual edge, to
				// avoid potential confusion.
				break
			}
			if isRedundantEdge(e) {
				delete(e.Src.Out, e.Dest)
				delete(e.Dest.In, e.Src)
			}
		}
	}
}

// isRedundantEdge determines if there is a path that allows e.Src
// to reach e.Dest after removing e. It performs a breadth-first search
// backwards from e.Dest over incoming edges, ignoring e itself.
func isRedundantEdge(e *Edge) bool {
	src, n := e.Src, e.Dest
	seen := map[*Node]bool{n: true}
	queue := Nodes{n}
	for len(queue) > 0 {
		n := queue[0]
		queue = queue[1:]
		for _, ie := range n.In {
			if e == ie || seen[ie.Src] {
				continue
			}
			if ie.Src == src {
				// Found an alternate path from src to e.Dest.
				return true
			}
			seen[ie.Src] = true
			queue = append(queue, ie.Src)
		}
	}
	return false
}
+
// nodeSorter is a mechanism used to allow a report to be sorted
// in different ways: it pairs the node slice with the comparison
// function selected by Nodes.Sort.
type nodeSorter struct {
	rs   Nodes
	less func(l, r *Node) bool
}

func (s nodeSorter) Len() int           { return len(s.rs) }
func (s nodeSorter) Swap(i, j int)      { s.rs[i], s.rs[j] = s.rs[j], s.rs[i] }
func (s nodeSorter) Less(i, j int) bool { return s.less(s.rs[i], s.rs[j]) }
+
// Sort reorders a slice of nodes based on the specified ordering
// criteria. The result is sorted in decreasing order for (absolute)
// numeric quantities, alphabetically for text, and increasing for
// addresses. Every comparator falls back to compareNodes for a
// deterministic total order.
func (ns Nodes) Sort(o NodeOrder) error {
	var s nodeSorter

	switch o {
	case FlatNameOrder:
		// Flat weight, then name, then cum weight.
		s = nodeSorter{ns,
			func(l, r *Node) bool {
				if iv, jv := abs64(l.Flat), abs64(r.Flat); iv != jv {
					return iv > jv
				}
				if iv, jv := l.Info.PrintableName(), r.Info.PrintableName(); iv != jv {
					return iv < jv
				}
				if iv, jv := abs64(l.Cum), abs64(r.Cum); iv != jv {
					return iv > jv
				}
				return compareNodes(l, r)
			},
		}
	case FlatCumNameOrder:
		// Flat weight, then cum weight, then name.
		s = nodeSorter{ns,
			func(l, r *Node) bool {
				if iv, jv := abs64(l.Flat), abs64(r.Flat); iv != jv {
					return iv > jv
				}
				if iv, jv := abs64(l.Cum), abs64(r.Cum); iv != jv {
					return iv > jv
				}
				if iv, jv := l.Info.PrintableName(), r.Info.PrintableName(); iv != jv {
					return iv < jv
				}
				return compareNodes(l, r)
			},
		}
	case NameOrder:
		s = nodeSorter{ns,
			func(l, r *Node) bool {
				if iv, jv := l.Info.Name, r.Info.Name; iv != jv {
					return iv < jv
				}
				return compareNodes(l, r)
			},
		}
	case FileOrder:
		s = nodeSorter{ns,
			func(l, r *Node) bool {
				if iv, jv := l.Info.File, r.Info.File; iv != jv {
					return iv < jv
				}
				if iv, jv := l.Info.StartLine, r.Info.StartLine; iv != jv {
					return iv < jv
				}
				return compareNodes(l, r)
			},
		}
	case AddressOrder:
		s = nodeSorter{ns,
			func(l, r *Node) bool {
				if iv, jv := l.Info.Address, r.Info.Address; iv != jv {
					return iv < jv
				}
				return compareNodes(l, r)
			},
		}
	case CumNameOrder, EntropyOrder:
		// Hold scoring for score-based ordering; both orders share the
		// same comparator and differ only in how score is populated.
		var score map[*Node]int64
		scoreOrder := func(l, r *Node) bool {
			if iv, jv := abs64(score[l]), abs64(score[r]); iv != jv {
				return iv > jv
			}
			if iv, jv := l.Info.PrintableName(), r.Info.PrintableName(); iv != jv {
				return iv < jv
			}
			if iv, jv := abs64(l.Flat), abs64(r.Flat); iv != jv {
				return iv > jv
			}
			return compareNodes(l, r)
		}

		switch o {
		case CumNameOrder:
			score = make(map[*Node]int64, len(ns))
			for _, n := range ns {
				score[n] = n.Cum
			}
			s = nodeSorter{ns, scoreOrder}
		case EntropyOrder:
			score = make(map[*Node]int64, len(ns))
			for _, n := range ns {
				score[n] = entropyScore(n)
			}
			s = nodeSorter{ns, scoreOrder}
		}
	default:
		return fmt.Errorf("report: unrecognized sort ordering: %d", o)
	}
	sort.Sort(s)
	return nil
}
+
+// compareNodes compares two nodes to provide a deterministic ordering
+// between them. Two nodes cannot have the same Node.Info value.
+func compareNodes(l, r *Node) bool {
+ return fmt.Sprint(l.Info) < fmt.Sprint(r.Info)
+}
+
+// entropyScore computes a score for a node representing how important
+// it is to include this node on a graph visualization. It is used to
+// sort the nodes and select which ones to display if we have more
+// nodes than desired in the graph. This number is computed by looking
+// at the flat and cum weights of the node and the incoming/outgoing
+// edges. The fundamental idea is to penalize nodes that have a simple
+// fallthrough from their incoming to the outgoing edge.
+func entropyScore(n *Node) int64 {
+ score := float64(0)
+
+ if len(n.In) == 0 {
+ score++ // Favor entry nodes
+ } else {
+ score += edgeEntropyScore(n, n.In, 0)
+ }
+
+ if len(n.Out) == 0 {
+ score++ // Favor leaf nodes
+ } else {
+ score += edgeEntropyScore(n, n.Out, n.Flat)
+ }
+
+ return int64(score*float64(n.Cum)) + n.Flat
+}
+
// edgeEntropyScore computes the entropy value for a set of edges
// coming in or out of a node. Entropy (as defined in information
// theory) refers to the amount of information encoded by the set of
// edges. A set of edges that have a more interesting distribution of
// samples gets a higher score. self (the node's own flat weight, for
// outgoing edges) participates as one more term in the distribution.
func edgeEntropyScore(n *Node, edges EdgeMap, self int64) float64 {
	score := float64(0)
	total := self
	for _, e := range edges {
		// Only positive weights grow the total.
		// NOTE(review): the fractions below use abs64 of every weight, so
		// with negative-weight edges they need not sum to 1 — presumably
		// acceptable for a heuristic; confirm against upstream intent.
		if e.Weight > 0 {
			total += abs64(e.Weight)
		}
	}
	if total != 0 {
		for _, e := range edges {
			frac := float64(abs64(e.Weight)) / float64(total)
			score += -frac * math.Log2(frac)
		}
		if self > 0 {
			frac := float64(abs64(self)) / float64(total)
			score += -frac * math.Log2(frac)
		}
	}
	return score
}
+
// NodeOrder sets the ordering for a Sort operation
type NodeOrder int

// Sorting options for node sort; see Nodes.Sort for the exact
// comparison each order applies.
const (
	FlatNameOrder NodeOrder = iota
	FlatCumNameOrder
	CumNameOrder
	NameOrder
	FileOrder
	AddressOrder
	EntropyOrder
)
+
+// Sort returns a slice of the edges in the map, in a consistent
+// order. The sort order is first based on the edge weight
+// (higher-to-lower) and then by the node names to avoid flakiness.
+func (e EdgeMap) Sort() []*Edge {
+ el := make(edgeList, 0, len(e))
+ for _, w := range e {
+ el = append(el, w)
+ }
+
+ sort.Sort(el)
+ return el
+}
+
+// Sum returns the total weight for a set of nodes.
+func (e EdgeMap) Sum() int64 {
+ var ret int64
+ for _, edge := range e {
+ ret += edge.Weight
+ }
+ return ret
+}
+
+type edgeList []*Edge
+
+func (el edgeList) Len() int {
+ return len(el)
+}
+
+func (el edgeList) Less(i, j int) bool {
+ if el[i].Weight != el[j].Weight {
+ return abs64(el[i].Weight) > abs64(el[j].Weight)
+ }
+
+ from1 := el[i].Src.Info.PrintableName()
+ from2 := el[j].Src.Info.PrintableName()
+ if from1 != from2 {
+ return from1 < from2
+ }
+
+ to1 := el[i].Dest.Info.PrintableName()
+ to2 := el[j].Dest.Info.PrintableName()
+
+ return to1 < to2
+}
+
+func (el edgeList) Swap(i, j int) {
+ el[i], el[j] = el[j], el[i]
+}
+
// abs64 returns the absolute value of i.
// Note: math.MinInt64 has no positive counterpart and negates to itself.
func abs64(i int64) int64 {
	if i >= 0 {
		return i
	}
	return -i
}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/measurement/measurement.go b/src/cmd/vendor/github.com/google/pprof/internal/measurement/measurement.go
new file mode 100644
index 0000000..e95b261
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/measurement/measurement.go
@@ -0,0 +1,328 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package measurement exports utility functions to manipulate/format performance profile sample values.
+package measurement
+
+import (
+ "fmt"
+ "math"
+ "strings"
+ "time"
+
+ "github.com/google/pprof/profile"
+)
+
+// ScaleProfiles updates the units in a set of profiles to make them
+// compatible. It scales the profiles to the smallest unit to preserve
+// data.
+func ScaleProfiles(profiles []*profile.Profile) error {
+	if len(profiles) == 0 {
+		return nil
+	}
+	// Find a common period type across all profiles that declare one.
+	periodTypes := make([]*profile.ValueType, 0, len(profiles))
+	for _, p := range profiles {
+		if p.PeriodType != nil {
+			periodTypes = append(periodTypes, p.PeriodType)
+		}
+	}
+	periodType, err := CommonValueType(periodTypes)
+	if err != nil {
+		return fmt.Errorf("period type: %v", err)
+	}
+
+	// Identify common sample types
+	numSampleTypes := len(profiles[0].SampleType)
+	for _, p := range profiles[1:] {
+		if numSampleTypes != len(p.SampleType) {
+			return fmt.Errorf("inconsistent samples type count: %d != %d", numSampleTypes, len(p.SampleType))
+		}
+	}
+	sampleType := make([]*profile.ValueType, numSampleTypes)
+	for i := 0; i < numSampleTypes; i++ {
+		sampleTypes := make([]*profile.ValueType, len(profiles))
+		for j, p := range profiles {
+			sampleTypes[j] = p.SampleType[i]
+		}
+		sampleType[i], err = CommonValueType(sampleTypes)
+		if err != nil {
+			return fmt.Errorf("sample types: %v", err)
+		}
+	}
+
+	// Rescale each profile's period and sample values to the common units.
+	for _, p := range profiles {
+		if p.PeriodType != nil && periodType != nil {
+			period, _ := Scale(p.Period, p.PeriodType.Unit, periodType.Unit)
+			p.Period, p.PeriodType.Unit = int64(period), periodType.Unit
+		}
+		ratios := make([]float64, len(p.SampleType))
+		for i, st := range p.SampleType {
+			if sampleType[i] == nil {
+				// No common type was found for this index; leave values unscaled.
+				ratios[i] = 1
+				continue
+			}
+			ratios[i], _ = Scale(1, st.Unit, sampleType[i].Unit)
+			p.SampleType[i].Unit = sampleType[i].Unit
+		}
+		if err := p.ScaleN(ratios); err != nil {
+			return fmt.Errorf("scale: %v", err)
+		}
+	}
+	return nil
+}
+
+// CommonValueType returns the finest type from a set of compatible
+// types. "Finest" means the type with the smallest unit, so that
+// converting values to it preserves precision. It returns (nil, nil)
+// when fewer than two types are supplied.
+func CommonValueType(ts []*profile.ValueType) (*profile.ValueType, error) {
+	if len(ts) <= 1 {
+		return nil, nil
+	}
+	minType := ts[0]
+	for _, t := range ts[1:] {
+		if !compatibleValueTypes(minType, t) {
+			return nil, fmt.Errorf("incompatible types: %v %v", *minType, *t)
+		}
+		// A ratio < 1 means t's unit is smaller than minType's.
+		if ratio, _ := Scale(1, t.Unit, minType.Unit); ratio < 1 {
+			minType = t
+		}
+	}
+	// Return a copy so callers cannot mutate the input slice's element.
+	rcopy := *minType
+	return &rcopy, nil
+}
+
+// compatibleValueTypes reports whether two value types can be scaled
+// into one another: same type name (modulo a trailing 's') and either
+// identical units or units from the same class (time or memory).
+func compatibleValueTypes(v1, v2 *profile.ValueType) bool {
+	if v1 == nil || v2 == nil {
+		return true // No grounds to disqualify.
+	}
+	// Remove trailing 's' to permit minor mismatches.
+	if t1, t2 := strings.TrimSuffix(v1.Type, "s"), strings.TrimSuffix(v2.Type, "s"); t1 != t2 {
+		return false
+	}
+
+	return v1.Unit == v2.Unit ||
+		(isTimeUnit(v1.Unit) && isTimeUnit(v2.Unit)) ||
+		(isMemoryUnit(v1.Unit) && isMemoryUnit(v2.Unit))
+}
+
+// Scale converts a measurement from one unit to a different unit and
+// returns the scaled value and the target unit. The returned target
+// unit will be empty if uninteresting (could be skipped).
+func Scale(value int64, fromUnit, toUnit string) (float64, string) {
+	// Avoid infinite recursion on overflow: -math.MinInt64 is still
+	// negative, so only recurse when the negation is representable.
+	if value < 0 && -value > 0 {
+		v, u := Scale(-value, fromUnit, toUnit)
+		return -v, u
+	}
+	if m, u, ok := memoryLabel(value, fromUnit, toUnit); ok {
+		return m, u
+	}
+	if t, u, ok := timeLabel(value, fromUnit, toUnit); ok {
+		return t, u
+	}
+	// Skip non-interesting units.
+	switch toUnit {
+	case "count", "sample", "unit", "minimum", "auto":
+		return float64(value), ""
+	default:
+		return float64(value), toUnit
+	}
+}
+
+// Label returns the label used to describe a certain measurement,
+// automatically selecting a human-friendly unit ("auto").
+func Label(value int64, unit string) string {
+	return ScaledLabel(value, unit, "auto")
+}
+
+// ScaledLabel scales the passed-in measurement (if necessary) and
+// returns the label used to describe a float measurement.
+// Values that render as 0.00 (or -0.00) collapse to a plain "0" with
+// no unit suffix.
+func ScaledLabel(value int64, fromUnit, toUnit string) string {
+	v, u := Scale(value, fromUnit, toUnit)
+	sv := strings.TrimSuffix(fmt.Sprintf("%.2f", v), ".00")
+	if sv == "0" || sv == "-0" {
+		return "0"
+	}
+	return sv + u
+}
+
+// Percentage computes the percentage of total of a value, and encodes
+// it as a string. At least two digits of precision are printed.
+// Ratios within 0.05 of 100% are printed as exactly 100% to avoid
+// misleading "99.95%"-style output from rounding.
+func Percentage(value, total int64) string {
+	var ratio float64
+	if total != 0 {
+		// ratio is always non-negative because of math.Abs here.
+		ratio = math.Abs(float64(value)/float64(total)) * 100
+	}
+	switch {
+	case math.Abs(ratio) >= 99.95 && math.Abs(ratio) <= 100.05:
+		return "  100%"
+	case math.Abs(ratio) >= 1.0:
+		return fmt.Sprintf("%5.2f%%", ratio)
+	default:
+		return fmt.Sprintf("%5.2g%%", ratio)
+	}
+}
+
+// isMemoryUnit returns whether a name is recognized as a memory size
+// unit. Matching is case-insensitive and ignores a trailing 's'.
+// NOTE(review): memoryLabel additionally accepts terabyte and petabyte
+// spellings that are not listed here — confirm whether that asymmetry
+// is intentional.
+func isMemoryUnit(unit string) bool {
+	switch strings.TrimSuffix(strings.ToLower(unit), "s") {
+	case "byte", "b", "kilobyte", "kb", "megabyte", "mb", "gigabyte", "gb":
+		return true
+	}
+	return false
+}
+
+// memoryLabel scales a memory measurement from fromUnit to toUnit and
+// reports whether fromUnit was recognized as a memory unit. The value
+// is first normalized to bytes; a toUnit of "minimum" or "auto"
+// selects the largest unit that keeps the scaled value >= 1.
+func memoryLabel(value int64, fromUnit, toUnit string) (v float64, u string, ok bool) {
+	fromUnit = strings.TrimSuffix(strings.ToLower(fromUnit), "s")
+	toUnit = strings.TrimSuffix(strings.ToLower(toUnit), "s")
+
+	// Normalize the input to bytes.
+	switch fromUnit {
+	case "byte", "b":
+	case "kb", "kbyte", "kilobyte":
+		value *= 1024
+	case "mb", "mbyte", "megabyte":
+		value *= 1024 * 1024
+	case "gb", "gbyte", "gigabyte":
+		value *= 1024 * 1024 * 1024
+	case "tb", "tbyte", "terabyte":
+		value *= 1024 * 1024 * 1024 * 1024
+	case "pb", "pbyte", "petabyte":
+		value *= 1024 * 1024 * 1024 * 1024 * 1024
+	default:
+		return 0, "", false
+	}
+
+	if toUnit == "minimum" || toUnit == "auto" {
+		switch {
+		case value < 1024:
+			toUnit = "b"
+		case value < 1024*1024:
+			toUnit = "kb"
+		case value < 1024*1024*1024:
+			toUnit = "mb"
+		case value < 1024*1024*1024*1024:
+			toUnit = "gb"
+		case value < 1024*1024*1024*1024*1024:
+			toUnit = "tb"
+		default:
+			toUnit = "pb"
+		}
+	}
+
+	// Convert from bytes to the requested unit; unrecognized target
+	// units fall back to plain bytes.
+	var output float64
+	switch toUnit {
+	default:
+		output, toUnit = float64(value), "B"
+	case "kb", "kbyte", "kilobyte":
+		output, toUnit = float64(value)/1024, "kB"
+	case "mb", "mbyte", "megabyte":
+		output, toUnit = float64(value)/(1024*1024), "MB"
+	case "gb", "gbyte", "gigabyte":
+		output, toUnit = float64(value)/(1024*1024*1024), "GB"
+	case "tb", "tbyte", "terabyte":
+		output, toUnit = float64(value)/(1024*1024*1024*1024), "TB"
+	case "pb", "pbyte", "petabyte":
+		output, toUnit = float64(value)/(1024*1024*1024*1024*1024), "PB"
+	}
+	return output, toUnit, true
+}
+
+// isTimeUnit returns whether a name is recognized as a time unit.
+// The trailing 's' is only trimmed for names longer than two
+// characters, so "ms" and "s" themselves are preserved.
+func isTimeUnit(unit string) bool {
+	unit = strings.ToLower(unit)
+	if len(unit) > 2 {
+		unit = strings.TrimSuffix(unit, "s")
+	}
+
+	switch unit {
+	case "nanosecond", "ns", "microsecond", "millisecond", "ms", "s", "second", "sec", "hr", "day", "week", "year":
+		return true
+	}
+	return false
+}
+
+// timeLabel scales a time measurement from fromUnit to toUnit and
+// reports whether fromUnit was recognized as a time unit. The value is
+// first normalized to a time.Duration; a toUnit of "minimum" or "auto"
+// picks a human-friendly unit based on the magnitude. "cycle" values
+// pass through unscaled with an empty unit.
+func timeLabel(value int64, fromUnit, toUnit string) (v float64, u string, ok bool) {
+	fromUnit = strings.ToLower(fromUnit)
+	if len(fromUnit) > 2 {
+		fromUnit = strings.TrimSuffix(fromUnit, "s")
+	}
+
+	toUnit = strings.ToLower(toUnit)
+	if len(toUnit) > 2 {
+		toUnit = strings.TrimSuffix(toUnit, "s")
+	}
+
+	// Normalize the input to a time.Duration.
+	var d time.Duration
+	switch fromUnit {
+	case "nanosecond", "ns":
+		d = time.Duration(value) * time.Nanosecond
+	case "microsecond":
+		d = time.Duration(value) * time.Microsecond
+	case "millisecond", "ms":
+		d = time.Duration(value) * time.Millisecond
+	case "second", "sec", "s":
+		d = time.Duration(value) * time.Second
+	case "cycle":
+		return float64(value), "", true
+	default:
+		return 0, "", false
+	}
+
+	if toUnit == "minimum" || toUnit == "auto" {
+		switch {
+		case d < 1*time.Microsecond:
+			toUnit = "ns"
+		case d < 1*time.Millisecond:
+			toUnit = "us"
+		case d < 1*time.Second:
+			toUnit = "ms"
+		case d < 1*time.Minute:
+			toUnit = "sec"
+		case d < 1*time.Hour:
+			toUnit = "min"
+		case d < 24*time.Hour:
+			toUnit = "hour"
+		case d < 15*24*time.Hour:
+			toUnit = "day"
+		case d < 120*24*time.Hour:
+			toUnit = "week"
+		default:
+			toUnit = "year"
+		}
+	}
+
+	var output float64
+	dd := float64(d)
+	switch toUnit {
+	case "ns", "nanosecond":
+		output, toUnit = dd/float64(time.Nanosecond), "ns"
+	case "us", "microsecond":
+		output, toUnit = dd/float64(time.Microsecond), "us"
+	case "ms", "millisecond":
+		output, toUnit = dd/float64(time.Millisecond), "ms"
+	case "min", "minute":
+		output, toUnit = dd/float64(time.Minute), "mins"
+	case "hour", "hr":
+		output, toUnit = dd/float64(time.Hour), "hrs"
+	case "day":
+		output, toUnit = dd/float64(24*time.Hour), "days"
+	case "week", "wk":
+		output, toUnit = dd/float64(7*24*time.Hour), "wks"
+	case "year", "yr":
+		output, toUnit = dd/float64(365*24*time.Hour), "yrs"
+	default:
+		// "sec", "second", "s" handled by default case.
+		output, toUnit = dd/float64(time.Second), "s"
+	}
+	return output, toUnit, true
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/plugin/plugin.go b/src/cmd/vendor/github.com/google/pprof/internal/plugin/plugin.go
new file mode 100644
index 0000000..3a8d0af
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/plugin/plugin.go
@@ -0,0 +1,213 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package plugin defines the plugin implementations that the main pprof driver requires.
+package plugin
+
+import (
+ "io"
+ "net/http"
+ "regexp"
+ "time"
+
+ "github.com/google/pprof/profile"
+)
+
+// Options groups all the optional plugins into pprof.
+type Options struct {
+	Writer  Writer     // writes report output under a given name
+	Flagset FlagSet    // defines and parses command-line flags
+	Fetch   Fetcher    // retrieves profiles from a path or URL
+	Sym     Symbolizer // adds symbol information to profiles
+	Obj     ObjTool    // inspects binaries and disassembles code
+	UI      UI         // handles interaction with the user
+
+	// HTTPServer is a function that should block serving http requests,
+	// including the handlers specified in args. If non-nil, pprof will
+	// invoke this function if necessary to provide a web interface.
+	//
+	// If HTTPServer is nil, pprof will use its own internal HTTP server.
+	//
+	// A common use for a custom HTTPServer is to provide custom
+	// authentication checks.
+	HTTPServer    func(args *HTTPServerArgs) error
+	HTTPTransport http.RoundTripper // transport used for profile-fetching HTTP requests
+}
+
+// Writer provides a mechanism to write data under a certain name,
+// typically a filename.
+type Writer interface {
+	// Open returns a writer for the given name; the caller is
+	// responsible for closing it.
+	Open(name string) (io.WriteCloser, error)
+}
+
+// A FlagSet creates and parses command-line flags.
+// It is similar to the standard flag.FlagSet.
+type FlagSet interface {
+	// Bool, Int, Float64, and String define new flags,
+	// like the functions of the same name in package flag.
+	Bool(name string, def bool, usage string) *bool
+	Int(name string, def int, usage string) *int
+	Float64(name string, def float64, usage string) *float64
+	String(name string, def string, usage string) *string
+
+	// StringList is similar to String but allows multiple values for a
+	// single flag.
+	StringList(name string, def string, usage string) *[]*string
+
+	// ExtraUsage returns any additional text that should be printed after the
+	// standard usage message. The extra usage message returned includes all text
+	// added with AddExtraUsage().
+	// The typical use of ExtraUsage is to show any custom flags defined by the
+	// specific pprof plugins being used.
+	ExtraUsage() string
+
+	// AddExtraUsage appends additional text to the end of the extra usage message.
+	AddExtraUsage(eu string)
+
+	// Parse initializes the flags with their values for this run
+	// and returns the non-flag command line arguments.
+	// If an unknown flag is encountered or there are no arguments,
+	// Parse should call usage and return nil.
+	Parse(usage func()) []string
+}
+
+// A Fetcher reads and returns the profile named by src. src can be a
+// local file path or a URL. duration and timeout are units specified
+// by the end user, or 0 by default. duration refers to the length of
+// the profile collection, if applicable, and timeout is the amount of
+// time to wait for a profile before returning an error. Returns the
+// fetched profile, the URL of the actual source of the profile, or an
+// error.
+type Fetcher interface {
+	Fetch(src string, duration, timeout time.Duration) (*profile.Profile, string, error)
+}
+
+// A Symbolizer introduces symbol information into a profile.
+type Symbolizer interface {
+	// Symbolize updates prof in place using the mapping sources in srcs;
+	// mode selects the symbolization strategy.
+	Symbolize(mode string, srcs MappingSources, prof *profile.Profile) error
+}
+
+// MappingSources map each profile.Mapping to the source of the profile.
+// The key is either Mapping.File or Mapping.BuildId. A key may map to
+// several sources when profiles have been merged.
+type MappingSources map[string][]struct {
+	Source string // URL of the source the mapping was collected from
+	Start  uint64 // delta applied to addresses from this source (to represent Merge adjustments)
+}
+
+// An ObjTool inspects shared libraries and executable files.
+type ObjTool interface {
+	// Open opens the named object file. If the object is a shared
+	// library, start/limit/offset are the addresses where it is mapped
+	// into memory in the address space being inspected.
+	Open(file string, start, limit, offset uint64) (ObjFile, error)
+
+	// Disasm disassembles the named object file, starting at
+	// the start address and stopping at (before) the end address.
+	// intelSyntax selects Intel assembler syntax for the output.
+	Disasm(file string, start, end uint64, intelSyntax bool) ([]Inst, error)
+}
+
+// An Inst is a single instruction in an assembly listing.
+type Inst struct {
+	Addr     uint64 // virtual address of instruction
+	Text     string // instruction text
+	Function string // function name
+	File     string // source file
+	Line     int    // source line
+}
+
+// An ObjFile is a single object file: a shared library or executable.
+type ObjFile interface {
+	// Name returns the underlying file name, if available.
+	Name() string
+
+	// Base returns the base address to use when looking up symbols in the file.
+	Base() uint64
+
+	// BuildID returns the GNU build ID of the file, or an empty string.
+	BuildID() string
+
+	// SourceLine reports the source line information for a given
+	// address in the file. Due to inlining, the source line information
+	// is in general a list of positions representing a call stack,
+	// with the leaf function first.
+	SourceLine(addr uint64) ([]Frame, error)
+
+	// Symbols returns a list of symbols in the object file.
+	// If r is not nil, Symbols restricts the list to symbols
+	// with names matching the regular expression.
+	// If addr is not zero, Symbols restricts the list to symbols
+	// containing that address.
+	Symbols(r *regexp.Regexp, addr uint64) ([]*Sym, error)
+
+	// Close closes the file, releasing associated resources.
+	Close() error
+}
+
+// A Frame describes a single line in a source file, as resolved from
+// an instruction address.
+type Frame struct {
+	Func string // name of function
+	File string // source file name
+	Line int    // line in file
+}
+
+// A Sym describes a single symbol in an object file.
+type Sym struct {
+	Name  []string // names of symbol (many if symbol was dedup'ed)
+	File  string   // object file containing symbol
+	Start uint64   // start virtual address
+	End   uint64   // virtual address of last byte in sym (Start+size-1)
+}
+
+// A UI manages user interactions.
+type UI interface {
+	// ReadLine returns a line of text (a command) read from the user.
+	// prompt is printed before reading the command.
+	ReadLine(prompt string) (string, error)
+
+	// Print shows a message to the user.
+	// It formats the text as fmt.Print would and adds a final \n if not already present.
+	// For line-based UI, Print writes to standard error.
+	// (Standard output is reserved for report data.)
+	Print(...interface{})
+
+	// PrintErr shows an error message to the user.
+	// It formats the text as fmt.Print would and adds a final \n if not already present.
+	// For line-based UI, PrintErr writes to standard error.
+	PrintErr(...interface{})
+
+	// IsTerminal returns whether the UI is known to be tied to an
+	// interactive terminal (as opposed to being redirected to a file).
+	IsTerminal() bool
+
+	// WantBrowser indicates whether a browser should be opened with the -http option.
+	WantBrowser() bool
+
+	// SetAutoComplete instructs the UI to call complete(cmd) to obtain
+	// the auto-completion of cmd, if the UI supports auto-completion at all.
+	SetAutoComplete(complete func(string) string)
+}
+
+// HTTPServerArgs contains arguments needed by an HTTP server that
+// is exporting a pprof web interface.
+type HTTPServerArgs struct {
+	// Hostport contains the http server address (derived from flags).
+	Hostport string
+
+	Host string // Host portion of Hostport
+	Port int    // Port portion of Hostport
+
+	// Handlers maps from URL paths to the handler to invoke to
+	// serve that path.
+	Handlers map[string]http.Handler
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/report/report.go b/src/cmd/vendor/github.com/google/pprof/internal/report/report.go
new file mode 100644
index 0000000..bc5685d
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/report/report.go
@@ -0,0 +1,1313 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package report summarizes a performance profile into a
+// human-readable report.
+package report
+
+import (
+ "fmt"
+ "io"
+ "path/filepath"
+ "regexp"
+ "sort"
+ "strconv"
+ "strings"
+ "text/tabwriter"
+ "time"
+
+ "github.com/google/pprof/internal/graph"
+ "github.com/google/pprof/internal/measurement"
+ "github.com/google/pprof/internal/plugin"
+ "github.com/google/pprof/profile"
+)
+
+// Output formats. These are the valid values for Options.OutputFormat,
+// dispatched on by Generate.
+const (
+	Callgrind = iota
+	Comments
+	Dis
+	Dot
+	List
+	Proto
+	Raw
+	Tags
+	Text
+	TopProto
+	Traces
+	Tree
+	WebList
+)
+
+// Options are the formatting and filtering options used to generate a
+// profile.
+type Options struct {
+	OutputFormat int // one of the output format constants above
+
+	CumSort       bool    // sort by cumulative rather than flat value
+	CallTree      bool    // build a call tree (honored for dot/callgrind output only)
+	DropNegative  bool    // drop nodes with negative values
+	CompactLabels bool
+	Ratio         float64 // scale all sample values by this ratio (0 or 1 = no scaling)
+	Title         string
+	ProfileLabels []string
+	ActiveFilters []string
+	NumLabelUnits map[string]string // units for numeric sample labels, keyed by label name
+
+	NodeCount    int     // maximum number of nodes to show (0 = no limit)
+	NodeFraction float64 // drop nodes below this fraction of the total
+	EdgeFraction float64 // drop edges below this fraction of the total
+
+	SampleValue       func(s []int64) int64
+	SampleMeanDivisor func(s []int64) int64
+	SampleType        string
+	SampleUnit        string // Unit for the sample data from the profile.
+
+	OutputUnit string // Units for data formatting in report.
+
+	Symbol     *regexp.Regexp // Symbols to include on disassembly report.
+	SourcePath string         // Search path for source files.
+	TrimPath   string         // Paths to trim from source file paths.
+
+	IntelSyntax bool // Whether or not to print assembly in Intel syntax.
+}
+
+// Generate generates a report as directed by the Report, writing it to
+// w in the format selected by the report's OutputFormat option. obj is
+// only consulted by the disassembly-based formats (Dis, WebList).
+func Generate(w io.Writer, rpt *Report, obj plugin.ObjTool) error {
+	o := rpt.options
+
+	switch o.OutputFormat {
+	case Comments:
+		return printComments(w, rpt)
+	case Dot:
+		return printDOT(w, rpt)
+	case Tree:
+		return printTree(w, rpt)
+	case Text:
+		return printText(w, rpt)
+	case Traces:
+		return printTraces(w, rpt)
+	case Raw:
+		fmt.Fprint(w, rpt.prof.String())
+		return nil
+	case Tags:
+		return printTags(w, rpt)
+	case Proto:
+		return printProto(w, rpt)
+	case TopProto:
+		return printTopProto(w, rpt)
+	case Dis:
+		return printAssembly(w, rpt, obj)
+	case List:
+		return printSource(w, rpt)
+	case WebList:
+		return printWebSource(w, rpt, obj)
+	case Callgrind:
+		return printCallgrind(w, rpt)
+	}
+	return fmt.Errorf("unexpected output format")
+}
+
+// newTrimmedGraph creates a graph for this report, trimmed according
+// to the report options. It returns the trimmed graph, the node count
+// after cutoff filtering but before the node-count limit was applied,
+// and the numbers of nodes and edges dropped along the way.
+func (rpt *Report) newTrimmedGraph() (g *graph.Graph, origCount, droppedNodes, droppedEdges int) {
+	o := rpt.options
+
+	// Build a graph and refine it. On each refinement step we must rebuild the graph from the samples,
+	// as the graph itself doesn't contain enough information to preserve full precision.
+	visualMode := o.OutputFormat == Dot
+	cumSort := o.CumSort
+
+	// The call_tree option is only honored when generating visual representations of the callgraph.
+	callTree := o.CallTree && (o.OutputFormat == Dot || o.OutputFormat == Callgrind)
+
+	// First step: Build complete graph to identify low frequency nodes, based on their cum weight.
+	g = rpt.newGraph(nil)
+	totalValue, _ := g.Nodes.Sum()
+	nodeCutoff := abs64(int64(float64(totalValue) * o.NodeFraction))
+	edgeCutoff := abs64(int64(float64(totalValue) * o.EdgeFraction))
+
+	// Filter out nodes with cum value below nodeCutoff.
+	if nodeCutoff > 0 {
+		if callTree {
+			// Tree mode: trim in place to preserve the tree structure.
+			if nodesKept := g.DiscardLowFrequencyNodePtrs(nodeCutoff); len(g.Nodes) != len(nodesKept) {
+				droppedNodes = len(g.Nodes) - len(nodesKept)
+				g.TrimTree(nodesKept)
+			}
+		} else {
+			// Graph mode: rebuild from the samples for full precision.
+			if nodesKept := g.DiscardLowFrequencyNodes(nodeCutoff); len(g.Nodes) != len(nodesKept) {
+				droppedNodes = len(g.Nodes) - len(nodesKept)
+				g = rpt.newGraph(nodesKept)
+			}
+		}
+	}
+	origCount = len(g.Nodes)
+
+	// Second step: Limit the total number of nodes. Apply specialized heuristics to improve
+	// visualization when generating dot output.
+	g.SortNodes(cumSort, visualMode)
+	if nodeCount := o.NodeCount; nodeCount > 0 {
+		// Remove low frequency tags and edges as they affect selection.
+		g.TrimLowFrequencyTags(nodeCutoff)
+		g.TrimLowFrequencyEdges(edgeCutoff)
+		if callTree {
+			if nodesKept := g.SelectTopNodePtrs(nodeCount, visualMode); len(g.Nodes) != len(nodesKept) {
+				g.TrimTree(nodesKept)
+				g.SortNodes(cumSort, visualMode)
+			}
+		} else {
+			if nodesKept := g.SelectTopNodes(nodeCount, visualMode); len(g.Nodes) != len(nodesKept) {
+				g = rpt.newGraph(nodesKept)
+				g.SortNodes(cumSort, visualMode)
+			}
+		}
+	}
+
+	// Final step: Filter out low frequency tags and edges, and remove redundant edges that clutter
+	// the graph.
+	g.TrimLowFrequencyTags(nodeCutoff)
+	droppedEdges = g.TrimLowFrequencyEdges(edgeCutoff)
+	if visualMode {
+		g.RemoveRedundantEdges()
+	}
+	return
+}
+
+// selectOutputUnit picks a concrete output unit when the report asked
+// for the "minimum" unit, storing the result in the report options.
+// The chosen unit is the one that best fits the smallest non-zero
+// sample value in the graph.
+func (rpt *Report) selectOutputUnit(g *graph.Graph) {
+	o := rpt.options
+
+	// Select best unit for profile output.
+	// Find the appropriate units for the smallest non-zero sample
+	if o.OutputUnit != "minimum" || len(g.Nodes) == 0 {
+		return
+	}
+	var minValue int64
+
+	for _, n := range g.Nodes {
+		nodeMin := abs64(n.FlatValue())
+		if nodeMin == 0 {
+			nodeMin = abs64(n.CumValue())
+		}
+		if nodeMin > 0 && (minValue == 0 || nodeMin < minValue) {
+			minValue = nodeMin
+		}
+	}
+	maxValue := rpt.total
+	if minValue == 0 {
+		minValue = maxValue
+	}
+
+	// Apply the user-requested sample ratio before choosing a unit.
+	if r := o.Ratio; r > 0 && r != 1 {
+		minValue = int64(float64(minValue) * r)
+		maxValue = int64(float64(maxValue) * r)
+	}
+
+	_, minUnit := measurement.Scale(minValue, o.SampleUnit, "minimum")
+	_, maxUnit := measurement.Scale(maxValue, o.SampleUnit, "minimum")
+
+	unit := minUnit
+	if minUnit != maxUnit && minValue*100 < maxValue && o.OutputFormat != Callgrind {
+		// Minimum and maximum values have different units. Scale
+		// minimum by 100 to use larger units, allowing minimum value to
+		// be scaled down to 0.01, except for callgrind reports since
+		// they can only represent integer values.
+		_, unit = measurement.Scale(100*minValue, o.SampleUnit, "minimum")
+	}
+
+	if unit != "" {
+		o.OutputUnit = unit
+	} else {
+		o.OutputUnit = o.SampleUnit
+	}
+}
+
+// newGraph creates a new graph for this report. If nodes is non-nil,
+// only nodes whose info matches are included. Otherwise, all nodes
+// are included, without trimming.
+// NOTE(review): this mutates rpt.prof in place (trims file paths,
+// drops non-"bytes" numeric labels, removes the pprof::base label).
+func (rpt *Report) newGraph(nodes graph.NodeSet) *graph.Graph {
+	o := rpt.options
+
+	// Clean up file paths using heuristics.
+	prof := rpt.prof
+	for _, f := range prof.Function {
+		f.Filename = trimPath(f.Filename, o.TrimPath, o.SourcePath)
+	}
+	// Removes all numeric tags except for the bytes tag prior
+	// to making graph.
+	// TODO: modify to select first numeric tag if no bytes tag
+	for _, s := range prof.Sample {
+		numLabels := make(map[string][]int64, len(s.NumLabel))
+		numUnits := make(map[string][]string, len(s.NumLabel))
+		for k, vs := range s.NumLabel {
+			if k == "bytes" {
+				unit := o.NumLabelUnits[k]
+				numValues := make([]int64, len(vs))
+				numUnit := make([]string, len(vs))
+				for i, v := range vs {
+					numValues[i] = v
+					numUnit[i] = unit
+				}
+				numLabels[k] = append(numLabels[k], numValues...)
+				numUnits[k] = append(numUnits[k], numUnit...)
+			}
+		}
+		s.NumLabel = numLabels
+		s.NumUnit = numUnits
+	}
+
+	// Remove label marking samples from the base profiles, so it does not appear
+	// as a nodelet in the graph view.
+	prof.RemoveLabel("pprof::base")
+
+	formatTag := func(v int64, key string) string {
+		return measurement.ScaledLabel(v, key, o.OutputUnit)
+	}
+
+	gopt := &graph.Options{
+		SampleValue:       o.SampleValue,
+		SampleMeanDivisor: o.SampleMeanDivisor,
+		FormatTag:         formatTag,
+		CallTree:          o.CallTree && (o.OutputFormat == Dot || o.OutputFormat == Callgrind),
+		DropNegative:      o.DropNegative,
+		KeptNodes:         nodes,
+	}
+
+	// Only keep binary names for disassembly-based reports, otherwise
+	// remove it to allow merging of functions across binaries.
+	switch o.OutputFormat {
+	case Raw, List, WebList, Dis, Callgrind:
+		gopt.ObjNames = true
+	}
+
+	return graph.New(rpt.prof, gopt)
+}
+
+// printProto writes the incoming proto via the writer w.
+// If the divide_by option has been specified, samples are scaled appropriately.
+func printProto(w io.Writer, rpt *Report) error {
+	p, o := rpt.prof, rpt.options
+
+	// Apply the sample ratio to all samples before saving the profile.
+	if r := o.Ratio; r > 0 && r != 1 {
+		for _, sample := range p.Sample {
+			for i, v := range sample.Value {
+				sample.Value[i] = int64(float64(v) * r)
+			}
+		}
+	}
+	return p.Write(w)
+}
+
+// printTopProto writes a list of the hottest routines in a profile as a profile.proto.
+// Each graph node becomes one location and one sample whose values are
+// [cum, flat], matching the SampleType declared below.
+func printTopProto(w io.Writer, rpt *Report) error {
+	p := rpt.prof
+	o := rpt.options
+	g, _, _, _ := rpt.newTrimmedGraph()
+	rpt.selectOutputUnit(g)
+
+	out := profile.Profile{
+		SampleType: []*profile.ValueType{
+			{Type: "cum", Unit: o.OutputUnit},
+			{Type: "flat", Unit: o.OutputUnit},
+		},
+		TimeNanos:     p.TimeNanos,
+		DurationNanos: p.DurationNanos,
+		PeriodType:    p.PeriodType,
+		Period:        p.Period,
+	}
+	functionMap := make(functionMap)
+	for i, n := range g.Nodes {
+		f, added := functionMap.findOrAdd(n.Info)
+		if added {
+			out.Function = append(out.Function, f)
+		}
+		flat, cum := n.FlatValue(), n.CumValue()
+		l := &profile.Location{
+			ID:      uint64(i + 1),
+			Address: n.Info.Address,
+			Line: []profile.Line{
+				{
+					Line:     int64(n.Info.Lineno),
+					Function: f,
+				},
+			},
+		}
+
+		// Scale values from the profile's unit to the output unit.
+		fv, _ := measurement.Scale(flat, o.SampleUnit, o.OutputUnit)
+		cv, _ := measurement.Scale(cum, o.SampleUnit, o.OutputUnit)
+		s := &profile.Sample{
+			Location: []*profile.Location{l},
+			Value:    []int64{int64(cv), int64(fv)},
+		}
+		out.Location = append(out.Location, l)
+		out.Sample = append(out.Sample, s)
+	}
+
+	return out.Write(w)
+}
+
+// functionMap deduplicates profile.Function entries by identity key; see findOrAdd.
+type functionMap map[string]*profile.Function
+
+// findOrAdd takes a node representing a function, adds the function
+// represented by the node to the map if the function is not already present,
+// and returns the function the node represents. This also returns a boolean,
+// which is true if the function was added and false otherwise.
+func (fm functionMap) findOrAdd(ni graph.NodeInfo) (*profile.Function, bool) {
+	// %q-quote each component so distinct field combinations cannot
+	// collide in the concatenated key.
+	fName := fmt.Sprintf("%q%q%q%d", ni.Name, ni.OrigName, ni.File, ni.StartLine)
+
+	if f := fm[fName]; f != nil {
+		return f, false
+	}
+
+	f := &profile.Function{
+		ID:         uint64(len(fm) + 1),
+		Name:       ni.Name,
+		SystemName: ni.OrigName,
+		Filename:   ni.File,
+		StartLine:  int64(ni.StartLine),
+	}
+	fm[fName] = f
+	return f, true
+}
+
+// printAssembly prints an annotated assembly listing with no limit on
+// the number of functions (maxFuncs = -1).
+func printAssembly(w io.Writer, rpt *Report, obj plugin.ObjTool) error {
+	return PrintAssembly(w, rpt, obj, -1)
+}
+
+// PrintAssembly prints annotated disassembly of rpt to w.
+// If maxFuncs is negative, all matching symbols are printed in name
+// order; otherwise only the maxFuncs symbols with the largest flat sum
+// are printed, hottest first.
+func PrintAssembly(w io.Writer, rpt *Report, obj plugin.ObjTool, maxFuncs int) error {
+	o := rpt.options
+	prof := rpt.prof
+
+	g := rpt.newGraph(nil)
+
+	// If the regexp source can be parsed as an address, also match
+	// functions that land on that address.
+	var address *uint64
+	if hex, err := strconv.ParseUint(o.Symbol.String(), 0, 64); err == nil {
+		address = &hex
+	}
+
+	fmt.Fprintln(w, "Total:", rpt.formatValue(rpt.total))
+	symbols := symbolsFromBinaries(prof, g, o.Symbol, address, obj)
+	symNodes := nodesPerSymbol(g.Nodes, symbols)
+
+	// Sort for printing.
+	var syms []*objSymbol
+	for s := range symNodes {
+		syms = append(syms, s)
+	}
+	// byName orders symbols by primary name, then start address.
+	byName := func(a, b *objSymbol) bool {
+		if na, nb := a.sym.Name[0], b.sym.Name[0]; na != nb {
+			return na < nb
+		}
+		return a.sym.Start < b.sym.Start
+	}
+	if maxFuncs < 0 {
+		sort.Sort(orderSyms{syms, byName})
+	} else {
+		// Order hottest-first when only the top maxFuncs are wanted.
+		byFlatSum := func(a, b *objSymbol) bool {
+			suma, _ := symNodes[a].Sum()
+			sumb, _ := symNodes[b].Sum()
+			if suma != sumb {
+				return suma > sumb
+			}
+			return byName(a, b)
+		}
+		sort.Sort(orderSyms{syms, byFlatSum})
+		if len(syms) > maxFuncs {
+			syms = syms[:maxFuncs]
+		}
+	}
+
+	// Correlate the symbols from the binary with the profile samples.
+	for _, s := range syms {
+		sns := symNodes[s]
+
+		// Gather samples for this symbol.
+		flatSum, cumSum := sns.Sum()
+
+		// Get the function assembly.
+		insts, err := obj.Disasm(s.sym.File, s.sym.Start, s.sym.End, o.IntelSyntax)
+		if err != nil {
+			return err
+		}
+
+		ns := annotateAssembly(insts, sns, s.base)
+
+		fmt.Fprintf(w, "ROUTINE ======================== %s\n", s.sym.Name[0])
+		for _, name := range s.sym.Name[1:] {
+			fmt.Fprintf(w, "    AKA ======================== %s\n", name)
+		}
+		fmt.Fprintf(w, "%10s %10s (flat, cum) %s of Total\n",
+			rpt.formatValue(flatSum), rpt.formatValue(cumSum),
+			measurement.Percentage(cumSum, rpt.total))
+
+		function, file, line := "", "", 0
+		for _, n := range ns {
+			locStr := ""
+			// Skip loc information if it hasn't changed from previous instruction.
+			if n.function != function || n.file != file || n.line != line {
+				function, file, line = n.function, n.file, n.line
+				if n.function != "" {
+					locStr = n.function + " "
+				}
+				if n.file != "" {
+					locStr += n.file
+					if n.line != 0 {
+						locStr += fmt.Sprintf(":%d", n.line)
+					}
+				}
+			}
+			switch {
+			case locStr == "":
+				// No location info, just print the instruction.
+				fmt.Fprintf(w, "%10s %10s %10x: %s\n",
+					valueOrDot(n.flatValue(), rpt),
+					valueOrDot(n.cumValue(), rpt),
+					n.address, n.instruction,
+				)
+			case len(n.instruction) < 40:
+				// Short instruction, print loc on the same line.
+				fmt.Fprintf(w, "%10s %10s %10x: %-40s;%s\n",
+					valueOrDot(n.flatValue(), rpt),
+					valueOrDot(n.cumValue(), rpt),
+					n.address, n.instruction,
+					locStr,
+				)
+			default:
+				// Long instruction, print loc on a separate line.
+				fmt.Fprintf(w, "%74s;%s\n", "", locStr)
+				fmt.Fprintf(w, "%10s %10s %10x: %s\n",
+					valueOrDot(n.flatValue(), rpt),
+					valueOrDot(n.cumValue(), rpt),
+					n.address, n.instruction,
+				)
+			}
+		}
+	}
+	return nil
+}
+
// symbolsFromBinaries examines the binaries listed on the profile
// that have associated samples, and identifies symbols matching rx.
// address, if non-nil, additionally matches mappings/symbols that
// contain that address even when they have no matching samples.
func symbolsFromBinaries(prof *profile.Profile, g *graph.Graph, rx *regexp.Regexp, address *uint64, obj plugin.ObjTool) []*objSymbol {
	hasSamples := make(map[string]bool)
	// Only examine mappings that have samples that match the
	// regexp. This is an optimization to speed up pprof.
	for _, n := range g.Nodes {
		if name := n.Info.PrintableName(); rx.MatchString(name) && n.Info.Objfile != "" {
			hasSamples[n.Info.Objfile] = true
		}
	}

	// Walk all mappings looking for matching functions with samples.
	var objSyms []*objSymbol
	for _, m := range prof.Mapping {
		if !hasSamples[m.File] {
			// Still consider the mapping if the user-supplied address
			// falls within its range.
			if address == nil || !(m.Start <= *address && *address <= m.Limit) {
				continue
			}
		}

		f, err := obj.Open(m.File, m.Start, m.Limit, m.Offset)
		if err != nil {
			// Best-effort: report the open failure on stdout and
			// keep going with the remaining mappings.
			fmt.Printf("%v\n", err)
			continue
		}

		// Find symbols in this binary matching the user regexp.
		var addr uint64
		if address != nil {
			addr = *address
		}
		msyms, err := f.Symbols(rx, addr)
		base := f.Base()
		f.Close()
		if err != nil {
			// Symbol extraction failed; skip this mapping silently.
			continue
		}
		for _, ms := range msyms {
			objSyms = append(objSyms,
				&objSymbol{
					sym: ms,
					base: base,
					// NOTE(review): f was closed above but is still
					// stored here and later used for SourceLine
					// lookups — confirm ObjFile tolerates use after
					// Close.
					file: f,
				},
			)
		}
	}

	return objSyms
}
+
// objSymbol represents a symbol identified from a binary. It includes
// the symbol information from the object tool and the base that must be
// added to correspond to sample addresses.
type objSymbol struct {
	sym  *plugin.Sym    // symbol name(s) and address range in the binary
	base uint64         // offset added to symbol addresses to match sample addresses
	file plugin.ObjFile // object file the symbol came from (may already be closed)
}
+
+// orderSyms is a wrapper type to sort []*objSymbol by a supplied comparator.
+type orderSyms struct {
+ v []*objSymbol
+ less func(a, b *objSymbol) bool
+}
+
+func (o orderSyms) Len() int { return len(o.v) }
+func (o orderSyms) Less(i, j int) bool { return o.less(o.v[i], o.v[j]) }
+func (o orderSyms) Swap(i, j int) { o.v[i], o.v[j] = o.v[j], o.v[i] }
+
+// nodesPerSymbol classifies nodes into a group of symbols.
+func nodesPerSymbol(ns graph.Nodes, symbols []*objSymbol) map[*objSymbol]graph.Nodes {
+ symNodes := make(map[*objSymbol]graph.Nodes)
+ for _, s := range symbols {
+ // Gather samples for this symbol.
+ for _, n := range ns {
+ address := n.Info.Address - s.base
+ if address >= s.sym.Start && address < s.sym.End {
+ symNodes[s] = append(symNodes[s], n)
+ }
+ }
+ }
+ return symNodes
+}
+
// assemblyInstruction is a single disassembled instruction annotated
// with the sample values attributed to it and the source location it
// maps to.
type assemblyInstruction struct {
	address     uint64 // instruction address
	instruction string // disassembled instruction text
	function    string // enclosing function name, if known
	file        string // base name of the source file, if known
	line        int    // source line number; 0 if unknown
	flat, cum   int64  // accumulated flat and cumulative sample values
	flatDiv, cumDiv int64 // divisors for mean profiles; 0 when unused
	startsBlock bool     // true if this starts a new contiguous run on its source line
	inlineCalls []callID // inlined call sites leading to this instruction
}

// callID identifies a call site by source file and line.
type callID struct {
	file string
	line int
}
+
+func (a *assemblyInstruction) flatValue() int64 {
+ if a.flatDiv != 0 {
+ return a.flat / a.flatDiv
+ }
+ return a.flat
+}
+
+func (a *assemblyInstruction) cumValue() int64 {
+ if a.cumDiv != 0 {
+ return a.cum / a.cumDiv
+ }
+ return a.cum
+}
+
// annotateAssembly annotates a set of assembly instructions with a
// set of samples. It returns a set of nodes to display. base is an
// offset to adjust the sample addresses.
func annotateAssembly(insts []plugin.Inst, samples graph.Nodes, base uint64) []assemblyInstruction {
	// Add end marker to simplify printing loop.
	insts = append(insts, plugin.Inst{
		Addr: ^uint64(0),
	})

	// Ensure samples are sorted by address.
	samples.Sort(graph.AddressOrder)

	// s walks the sorted samples in lockstep with the instructions;
	// each sample is consumed by exactly one instruction.
	s := 0
	asm := make([]assemblyInstruction, 0, len(insts))
	for ix, in := range insts[:len(insts)-1] {
		n := assemblyInstruction{
			address:     in.Addr,
			instruction: in.Text,
			function:    in.Function,
			line:        in.Line,
		}
		if in.File != "" {
			n.file = filepath.Base(in.File)
		}

		// Sum all the samples until the next instruction (to account
		// for samples attributed to the middle of an instruction).
		for next := insts[ix+1].Addr; s < len(samples) && samples[s].Info.Address-base < next; s++ {
			sample := samples[s]
			n.flatDiv += sample.FlatDiv
			n.flat += sample.Flat
			n.cumDiv += sample.CumDiv
			n.cum += sample.Cum
			// Backfill location details missing from the disassembly
			// using the sample's own info; never overwrite what the
			// disassembler already provided.
			if f := sample.Info.File; f != "" && n.file == "" {
				n.file = filepath.Base(f)
			}
			if ln := sample.Info.Lineno; ln != 0 && n.line == 0 {
				n.line = ln
			}
			if f := sample.Info.Name; f != "" && n.function == "" {
				n.function = f
			}
		}
		asm = append(asm, n)
	}

	return asm
}
+
+// valueOrDot formats a value according to a report, intercepting zero
+// values.
+func valueOrDot(value int64, rpt *Report) string {
+ if value == 0 {
+ return "."
+ }
+ return rpt.formatValue(value)
+}
+
// printTags collects all tags referenced in the profile and prints
// them in a sorted table.
func printTags(w io.Writer, rpt *Report) error {
	p := rpt.prof

	o := rpt.options
	formatTag := func(v int64, key string) string {
		return measurement.ScaledLabel(v, key, o.OutputUnit)
	}

	// Hashtable to accumulate tags as key,value,count.
	tagMap := make(map[string]map[string]int64)
	for _, s := range p.Sample {
		// String-valued labels: one bucket per distinct value.
		for key, vals := range s.Label {
			for _, val := range vals {
				valueMap, ok := tagMap[key]
				if !ok {
					valueMap = make(map[string]int64)
					tagMap[key] = valueMap
				}
				valueMap[val] += o.SampleValue(s.Value)
			}
		}
		// Numeric labels: format the value with its unit so equal
		// quantities share a bucket.
		for key, vals := range s.NumLabel {
			unit := o.NumLabelUnits[key]
			for _, nval := range vals {
				val := formatTag(nval, unit)
				valueMap, ok := tagMap[key]
				if !ok {
					valueMap = make(map[string]int64)
					tagMap[key] = valueMap
				}
				valueMap[val] += o.SampleValue(s.Value)
			}
		}
	}

	tagKeys := make([]*graph.Tag, 0, len(tagMap))
	for key := range tagMap {
		tagKeys = append(tagKeys, &graph.Tag{Name: key})
	}
	tabw := tabwriter.NewWriter(w, 0, 0, 1, ' ', tabwriter.AlignRight)
	// Emit keys in sorted order for deterministic output.
	for _, tagKey := range graph.SortTags(tagKeys, true) {
		var total int64
		key := tagKey.Name
		tags := make([]*graph.Tag, 0, len(tagMap[key]))
		for t, c := range tagMap[key] {
			total += c
			tags = append(tags, &graph.Tag{Name: t, Flat: c})
		}

		f, u := measurement.Scale(total, o.SampleUnit, o.OutputUnit)
		fmt.Fprintf(tabw, "%s:\t Total %.1f%s\n", key, f, u)
		for _, t := range graph.SortTags(tags, true) {
			f, u := measurement.Scale(t.FlatValue(), o.SampleUnit, o.OutputUnit)
			if total > 0 {
				fmt.Fprintf(tabw, " \t%.1f%s (%s):\t %s\n", f, u, measurement.Percentage(t.FlatValue(), total), t.Name)
			} else {
				// Skip the percentage when the key has no total.
				fmt.Fprintf(tabw, " \t%.1f%s:\t %s\n", f, u, t.Name)
			}
		}
		fmt.Fprintln(tabw)
	}
	return tabw.Flush()
}
+
+// printComments prints all freeform comments in the profile.
+func printComments(w io.Writer, rpt *Report) error {
+ p := rpt.prof
+
+ for _, c := range p.Comments {
+ fmt.Fprintln(w, c)
+ }
+ return nil
+}
+
// TextItem holds a single text report entry.
type TextItem struct {
	Name        string // printable node name
	InlineLabel string // Not empty if inlined
	Flat, Cum   int64  // Raw values
	FlatFormat, CumFormat string // Formatted values, in the report's output unit
}
+
// TextItems returns a list of text items from the report and a list
// of labels that describe the report.
func TextItems(rpt *Report) ([]TextItem, []string) {
	g, origCount, droppedNodes, _ := rpt.newTrimmedGraph()
	rpt.selectOutputUnit(g)
	labels := reportLabels(rpt, g, origCount, droppedNodes, 0, false)

	var items []TextItem
	var flatSum int64
	for _, n := range g.Nodes {
		name, flat, cum := n.Info.PrintableName(), n.FlatValue(), n.CumValue()

		// Classify the node from its incoming edges: inlined on all,
		// none, or only some of them.
		var inline, noinline bool
		for _, e := range n.In {
			if e.Inline {
				inline = true
			} else {
				noinline = true
			}
		}

		var inl string
		if inline {
			if noinline {
				inl = "(partial-inline)"
			} else {
				inl = "(inline)"
			}
		}

		// NOTE(review): flatSum is accumulated here but never used in
		// this function; printText recomputes the running sum itself.
		flatSum += flat
		items = append(items, TextItem{
			Name:        name,
			InlineLabel: inl,
			Flat:        flat,
			Cum:         cum,
			FlatFormat:  rpt.formatValue(flat),
			CumFormat:   rpt.formatValue(cum),
		})
	}
	return items, labels
}
+
// printText prints a flat text report for a profile.
func printText(w io.Writer, rpt *Report) error {
	items, labels := TextItems(rpt)
	fmt.Fprintln(w, strings.Join(labels, "\n"))
	fmt.Fprintf(w, "%10s %5s%% %5s%% %10s %5s%%\n",
		"flat", "flat", "sum", "cum", "cum")
	// flatSum is the running total backing the "sum%" column.
	var flatSum int64
	for _, item := range items {
		inl := item.InlineLabel
		if inl != "" {
			inl = " " + inl
		}
		flatSum += item.Flat
		fmt.Fprintf(w, "%10s %s %s %10s %s %s%s\n",
			item.FlatFormat, measurement.Percentage(item.Flat, rpt.total),
			measurement.Percentage(flatSum, rpt.total),
			item.CumFormat, measurement.Percentage(item.Cum, rpt.total),
			item.Name, inl)
	}
	return nil
}
+
// printTraces prints all traces from a profile.
func printTraces(w io.Writer, rpt *Report) error {
	fmt.Fprintln(w, strings.Join(ProfileLabels(rpt), "\n"))

	prof := rpt.prof
	o := rpt.options

	const separator = "-----------+-------------------------------------------------------"

	_, locations := graph.CreateNodes(prof, &graph.Options{})
	for _, sample := range prof.Sample {
		type stk struct {
			*graph.NodeInfo
			inline bool
		}
		// Flatten the sample's locations into a single stack of
		// frames, leaf first.
		var stack []stk
		for _, loc := range sample.Location {
			nodes := locations[loc.ID]
			for i, n := range nodes {
				// The inline flag may be inaccurate if 'show' or 'hide' filter is
				// used. See https://github.com/google/pprof/issues/511.
				inline := i != len(nodes)-1
				stack = append(stack, stk{&n.Info, inline})
			}
		}

		if len(stack) == 0 {
			continue
		}

		fmt.Fprintln(w, separator)
		// Print any text labels for the sample.
		var labels []string
		for s, vs := range sample.Label {
			labels = append(labels, fmt.Sprintf("%10s: %s\n", s, strings.Join(vs, " ")))
		}
		sort.Strings(labels)
		fmt.Fprint(w, strings.Join(labels, ""))

		// Print any numeric labels for the sample
		var numLabels []string
		for key, vals := range sample.NumLabel {
			unit := o.NumLabelUnits[key]
			numValues := make([]string, len(vals))
			for i, vv := range vals {
				numValues[i] = measurement.Label(vv, unit)
			}
			numLabels = append(numLabels, fmt.Sprintf("%10s: %s\n", key, strings.Join(numValues, " ")))
		}
		sort.Strings(numLabels)
		fmt.Fprint(w, strings.Join(numLabels, ""))

		var d, v int64
		v = o.SampleValue(sample.Value)
		if o.SampleMeanDivisor != nil {
			d = o.SampleMeanDivisor(sample.Value)
		}
		// Print call stack.
		if d != 0 {
			// Mean profile: report the mean rather than the sum.
			v = v / d
		}
		for i, s := range stack {
			var vs, inline string
			if i == 0 {
				// Only the leaf frame carries the sample value.
				vs = rpt.formatValue(v)
			}
			if s.inline {
				inline = " (inline)"
			}
			fmt.Fprintf(w, "%10s %s%s\n", vs, s.PrintableName(), inline)
		}
	}
	fmt.Fprintln(w, separator)
	return nil
}
+
// printCallgrind prints a graph for a profile on callgrind format.
func printCallgrind(w io.Writer, rpt *Report) error {
	o := rpt.options
	// Deliberately disable all trimming: callgrind consumers expect
	// the complete graph.
	rpt.options.NodeFraction = 0
	rpt.options.EdgeFraction = 0
	rpt.options.NodeCount = 0

	g, _, _, _ := rpt.newTrimmedGraph()
	rpt.selectOutputUnit(g)

	nodeNames := getDisambiguatedNames(g)

	fmt.Fprintln(w, "positions: instr line")
	fmt.Fprintln(w, "events:", o.SampleType+"("+o.OutputUnit+")")

	// Name tables for callgrind's string-compression scheme; shared
	// across the whole output stream.
	objfiles := make(map[string]int)
	files := make(map[string]int)
	names := make(map[string]int)

	// prevInfo points to the previous NodeInfo.
	// It is used to group cost lines together as much as possible.
	var prevInfo *graph.NodeInfo
	for _, n := range g.Nodes {
		if prevInfo == nil || n.Info.Objfile != prevInfo.Objfile || n.Info.File != prevInfo.File || n.Info.Name != prevInfo.Name {
			fmt.Fprintln(w)
			fmt.Fprintln(w, "ob="+callgrindName(objfiles, n.Info.Objfile))
			fmt.Fprintln(w, "fl="+callgrindName(files, n.Info.File))
			fmt.Fprintln(w, "fn="+callgrindName(names, n.Info.Name))
		}

		addr := callgrindAddress(prevInfo, n.Info.Address)
		sv, _ := measurement.Scale(n.FlatValue(), o.SampleUnit, o.OutputUnit)
		fmt.Fprintf(w, "%s %d %d\n", addr, n.Info.Lineno, int64(sv))

		// Print outgoing edges.
		for _, out := range n.Out.Sort() {
			c, _ := measurement.Scale(out.Weight, o.SampleUnit, o.OutputUnit)
			callee := out.Dest
			fmt.Fprintln(w, "cfl="+callgrindName(files, callee.Info.File))
			fmt.Fprintln(w, "cfn="+callgrindName(names, nodeNames[callee]))
			// pprof doesn't have a flat weight for a call, leave as 0.
			fmt.Fprintf(w, "calls=0 %s %d\n", callgrindAddress(prevInfo, callee.Info.Address), callee.Info.Lineno)
			// TODO: This address may be in the middle of a call
			// instruction. It would be best to find the beginning
			// of the instruction, but the tools seem to handle
			// this OK.
			fmt.Fprintf(w, "* * %d\n", int64(c))
		}

		prevInfo = &n.Info
	}

	return nil
}
+
// getDisambiguatedNames returns a map from each node in the graph to
// the name to use in the callgrind output. Callgrind merges all
// functions with the same [file name, function name]. Add a [%d/n]
// suffix to disambiguate nodes with different values of
// node.Function, which we want to keep separate. In particular, this
// affects graphs created with --call_tree, where nodes from different
// contexts are associated to different Functions.
func getDisambiguatedNames(g *graph.Graph) map[*graph.Node]string {
	nodeName := make(map[*graph.Node]string, len(g.Nodes))

	type names struct {
		file, function string
	}

	// nameFunctionIndex maps the callgrind names (filename, function)
	// to the node.Function values found for that name, and each
	// node.Function value to a sequential index to be used on the
	// disambiguated name.
	nameFunctionIndex := make(map[names]map[*graph.Node]int)
	for _, n := range g.Nodes {
		nm := names{n.Info.File, n.Info.Name}
		p, ok := nameFunctionIndex[nm]
		if !ok {
			p = make(map[*graph.Node]int)
			nameFunctionIndex[nm] = p
		}
		// First occurrence of this Function gets the next index.
		if _, ok := p[n.Function]; !ok {
			p[n.Function] = len(p)
		}
	}

	for _, n := range g.Nodes {
		nm := names{n.Info.File, n.Info.Name}
		nodeName[n] = n.Info.Name
		if p := nameFunctionIndex[nm]; len(p) > 1 {
			// If there is more than one function, add suffix to disambiguate.
			nodeName[n] += fmt.Sprintf(" [%d/%d]", p[n.Function]+1, len(p))
		}
	}
	return nodeName
}
+
// callgrindName implements the callgrind naming compression scheme.
// For names not previously seen returns "(N) name", where N is a
// unique index. For names previously seen returns "(N)" where N is
// the index returned the first time.
func callgrindName(names map[string]int, name string) string {
	if name == "" {
		return ""
	}
	id, seen := names[name]
	if !seen {
		// First occurrence: allocate the next index and emit the
		// full "(N) name" form.
		id = len(names) + 1
		names[name] = id
		return fmt.Sprintf("(%d) %s", id, name)
	}
	return fmt.Sprintf("(%d)", id)
}
+
+// callgrindAddress implements the callgrind subposition compression scheme if
+// possible. If prevInfo != nil, it contains the previous address. The current
+// address can be given relative to the previous address, with an explicit +/-
+// to indicate it is relative, or * for the same address.
+func callgrindAddress(prevInfo *graph.NodeInfo, curr uint64) string {
+ abs := fmt.Sprintf("%#x", curr)
+ if prevInfo == nil {
+ return abs
+ }
+
+ prev := prevInfo.Address
+ if prev == curr {
+ return "*"
+ }
+
+ diff := int64(curr - prev)
+ relative := fmt.Sprintf("%+d", diff)
+
+ // Only bother to use the relative address if it is actually shorter.
+ if len(relative) < len(abs) {
+ return relative
+ }
+
+ return abs
+}
+
// printTree prints a tree-based report in text form.
func printTree(w io.Writer, rpt *Report) error {
	const separator = "----------------------------------------------------------+-------------"
	const legend = " flat flat% sum% cum cum% calls calls% + context "

	g, origCount, droppedNodes, _ := rpt.newTrimmedGraph()
	rpt.selectOutputUnit(g)

	fmt.Fprintln(w, strings.Join(reportLabels(rpt, g, origCount, droppedNodes, 0, false), "\n"))

	fmt.Fprintln(w, separator)
	fmt.Fprintln(w, legend)
	// Running total backing the "sum%" column.
	var flatSum int64

	rx := rpt.options.Symbol
	for _, n := range g.Nodes {
		name, flat, cum := n.Info.PrintableName(), n.FlatValue(), n.CumValue()

		// Skip any entries that do not match the regexp (for the "peek" command).
		if rx != nil && !rx.MatchString(name) {
			continue
		}

		fmt.Fprintln(w, separator)
		// Print incoming edges.
		inEdges := n.In.Sort()
		for _, in := range inEdges {
			var inline string
			if in.Inline {
				inline = " (inline)"
			}
			// Caller percentages are relative to this node's cum.
			fmt.Fprintf(w, "%50s %s | %s%s\n", rpt.formatValue(in.Weight),
				measurement.Percentage(in.Weight, cum), in.Src.Info.PrintableName(), inline)
		}

		// Print current node.
		flatSum += flat
		fmt.Fprintf(w, "%10s %s %s %10s %s | %s\n",
			rpt.formatValue(flat),
			measurement.Percentage(flat, rpt.total),
			measurement.Percentage(flatSum, rpt.total),
			rpt.formatValue(cum),
			measurement.Percentage(cum, rpt.total),
			name)

		// Print outgoing edges.
		outEdges := n.Out.Sort()
		for _, out := range outEdges {
			var inline string
			if out.Inline {
				inline = " (inline)"
			}
			fmt.Fprintf(w, "%50s %s | %s%s\n", rpt.formatValue(out.Weight),
				measurement.Percentage(out.Weight, cum), out.Dest.Info.PrintableName(), inline)
		}
	}
	// Close the listing, but only if at least one entry was printed.
	if len(g.Nodes) > 0 {
		fmt.Fprintln(w, separator)
	}
	return nil
}
+
+// GetDOT returns a graph suitable for dot processing along with some
+// configuration information.
+func GetDOT(rpt *Report) (*graph.Graph, *graph.DotConfig) {
+ g, origCount, droppedNodes, droppedEdges := rpt.newTrimmedGraph()
+ rpt.selectOutputUnit(g)
+ labels := reportLabels(rpt, g, origCount, droppedNodes, droppedEdges, true)
+
+ c := &graph.DotConfig{
+ Title: rpt.options.Title,
+ Labels: labels,
+ FormatValue: rpt.formatValue,
+ Total: rpt.total,
+ }
+ return g, c
+}
+
// printDOT prints an annotated callgraph in DOT format.
func printDOT(w io.Writer, rpt *Report) error {
	g, c := GetDOT(rpt)
	// ComposeDot writes directly to w and reports no error.
	graph.ComposeDot(w, g, &graph.DotAttributes{}, c)
	return nil
}
+
// ProfileLabels returns printable labels for a profile.
func ProfileLabels(rpt *Report) []string {
	label := []string{}
	prof := rpt.prof
	o := rpt.options
	// The first mapping is taken as the main binary.
	if len(prof.Mapping) > 0 {
		if prof.Mapping[0].File != "" {
			label = append(label, "File: "+filepath.Base(prof.Mapping[0].File))
		}
		if prof.Mapping[0].BuildID != "" {
			label = append(label, "Build ID: "+prof.Mapping[0].BuildID)
		}
	}
	// Only include comments that do not start with '#'.
	for _, c := range prof.Comments {
		if !strings.HasPrefix(c, "#") {
			label = append(label, c)
		}
	}
	if o.SampleType != "" {
		label = append(label, "Type: "+o.SampleType)
	}
	if prof.TimeNanos != 0 {
		const layout = "Jan 2, 2006 at 3:04pm (MST)"
		label = append(label, "Time: "+time.Unix(0, prof.TimeNanos).Format(layout))
	}
	if prof.DurationNanos != 0 {
		duration := measurement.Label(prof.DurationNanos, "nanoseconds")
		totalNanos, totalUnit := measurement.Scale(rpt.total, o.SampleUnit, "nanoseconds")
		var ratio string
		// Only show the ratio when the samples scale to nanoseconds,
		// i.e. when they are comparable to the wall-clock duration.
		if totalUnit == "ns" && totalNanos != 0 {
			ratio = "(" + measurement.Percentage(int64(totalNanos), prof.DurationNanos) + ")"
		}
		label = append(label, fmt.Sprintf("Duration: %s, Total samples = %s %s", duration, rpt.formatValue(rpt.total), ratio))
	}
	return label
}
+
// reportLabels returns printable labels for a report. Includes
// profileLabels.
func reportLabels(rpt *Report, g *graph.Graph, origCount, droppedNodes, droppedEdges int, fullHeaders bool) []string {
	nodeFraction := rpt.options.NodeFraction
	edgeFraction := rpt.options.EdgeFraction
	nodeCount := len(g.Nodes)

	var label []string
	// User-supplied labels take precedence over the generated ones.
	if len(rpt.options.ProfileLabels) > 0 {
		label = append(label, rpt.options.ProfileLabels...)
	} else if fullHeaders || !rpt.options.CompactLabels {
		label = ProfileLabels(rpt)
	}

	var flatSum int64
	for _, n := range g.Nodes {
		flatSum = flatSum + n.FlatValue()
	}

	if len(rpt.options.ActiveFilters) > 0 {
		activeFilters := legendActiveFilters(rpt.options.ActiveFilters)
		label = append(label, activeFilters...)
	}

	label = append(label, fmt.Sprintf("Showing nodes accounting for %s, %s of %s total", rpt.formatValue(flatSum), strings.TrimSpace(measurement.Percentage(flatSum, rpt.total)), rpt.formatValue(rpt.total)))

	// Drop/trim summaries are only meaningful for a nonzero total.
	if rpt.total != 0 {
		if droppedNodes > 0 {
			label = append(label, genLabel(droppedNodes, "node", "cum",
				rpt.formatValue(abs64(int64(float64(rpt.total)*nodeFraction)))))
		}
		if droppedEdges > 0 {
			label = append(label, genLabel(droppedEdges, "edge", "freq",
				rpt.formatValue(abs64(int64(float64(rpt.total)*edgeFraction)))))
		}
		if nodeCount > 0 && nodeCount < origCount {
			label = append(label, fmt.Sprintf("Showing top %d nodes out of %d",
				nodeCount, origCount))
		}
	}

	// Help new users understand the graph.
	// A new line is intentionally added here to better show this message.
	if fullHeaders {
		label = append(label, "\nSee https://git.io/JfYMW for how to read the graph")
	}

	return label
}
+
// legendActiveFilters returns a legend describing the active filters:
// a header line followed by one indented line per filter. Filters
// longer than 80 bytes are truncated with an ellipsis. Truncation is
// kept on a UTF-8 rune boundary so a multi-byte character is never
// split (the original byte-slice cut could produce invalid UTF-8).
func legendActiveFilters(activeFilters []string) []string {
	legendActiveFilters := make([]string, len(activeFilters)+1)
	legendActiveFilters[0] = "Active filters:"
	for i, s := range activeFilters {
		if len(s) > 80 {
			// Back up from byte 80 past any UTF-8 continuation bytes
			// (10xxxxxx) so the cut lands on a rune boundary.
			cut := 80
			for cut > 0 && s[cut]&0xc0 == 0x80 {
				cut--
			}
			s = s[:cut] + "…"
		}
		legendActiveFilters[i+1] = " " + s
	}
	return legendActiveFilters
}
+
// genLabel returns a "Dropped ..." legend line for d dropped items of
// kind n, where l names the measure and f the formatted threshold.
func genLabel(d int, n, l, f string) string {
	noun := n
	if d > 1 {
		// Naive pluralization; the callers only pass "node"/"edge".
		noun += "s"
	}
	return fmt.Sprintf("Dropped %d %s (%s <= %s)", d, noun, l, f)
}
+
// New builds a new report indexing the sample values interpreting the
// samples with the provided function.
func New(prof *profile.Profile, o *Options) *Report {
	format := func(v int64) string {
		// Apply the user-requested scaling ratio before formatting,
		// skipping the identity ratio.
		if r := o.Ratio; r > 0 && r != 1 {
			fv := float64(v) * r
			v = int64(fv)
		}
		return measurement.ScaledLabel(v, o.SampleUnit, o.OutputUnit)
	}
	return &Report{prof, computeTotal(prof, o.SampleValue, o.SampleMeanDivisor),
		o, format}
}
+
// NewDefault builds a new report indexing the last sample value
// available. options is received by value, so the caller's copy is
// not modified. Note: panics if prof declares no sample types.
func NewDefault(prof *profile.Profile, options Options) *Report {
	index := len(prof.SampleType) - 1
	o := &options
	// Default the title to the main binary's name when available.
	if o.Title == "" && len(prof.Mapping) > 0 && prof.Mapping[0].File != "" {
		o.Title = filepath.Base(prof.Mapping[0].File)
	}
	o.SampleType = prof.SampleType[index].Type
	o.SampleUnit = strings.ToLower(prof.SampleType[index].Unit)
	o.SampleValue = func(v []int64) int64 {
		return v[index]
	}
	return New(prof, o)
}
+
// computeTotal computes the sum of the absolute value of all sample values.
// If any samples have label indicating they belong to the diff base, then the
// total will only include samples with that label.
func computeTotal(prof *profile.Profile, value, meanDiv func(v []int64) int64) int64 {
	var div, total, diffDiv, diffTotal int64
	for _, sample := range prof.Sample {
		var d, v int64
		v = value(sample.Value)
		if meanDiv != nil {
			d = meanDiv(sample.Value)
		}
		// Use absolute values so negative (diff) samples still
		// contribute magnitude to the total.
		if v < 0 {
			v = -v
		}
		total += v
		div += d
		// Accumulate diff-base samples separately; if any exist they
		// replace the overall totals below.
		if sample.DiffBaseSample() {
			diffTotal += v
			diffDiv += d
		}
	}
	if diffTotal > 0 {
		total = diffTotal
		div = diffDiv
	}
	// For mean profiles, report the mean rather than the raw sum.
	if div != 0 {
		return total / div
	}
	return total
}
+
// Report contains the data and associated routines to extract a
// report from a profile.
type Report struct {
	prof        *profile.Profile
	total       int64 // total sample value, as computed by computeTotal
	options     *Options
	formatValue func(int64) string // formats a value in the report's output unit
}

// Total returns the total number of samples in a report.
func (rpt *Report) Total() int64 { return rpt.total }
+
// abs64 returns the absolute value of i.
// Note: abs64(math.MinInt64) overflows and returns math.MinInt64.
func abs64(i int64) int64 {
	if i < 0 {
		return -i
	}
	return i
}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/report/source.go b/src/cmd/vendor/github.com/google/pprof/internal/report/source.go
new file mode 100644
index 0000000..b480535
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/report/source.go
@@ -0,0 +1,653 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package report
+
+// This file contains routines related to the generation of annotated
+// source listings.
+
+import (
+ "bufio"
+ "fmt"
+ "html/template"
+ "io"
+ "os"
+ "path/filepath"
+ "strconv"
+ "strings"
+
+ "github.com/google/pprof/internal/graph"
+ "github.com/google/pprof/internal/measurement"
+ "github.com/google/pprof/internal/plugin"
+)
+
// printSource prints an annotated source listing, including all
// functions with samples that match the regexp rpt.options.symbol.
// The sources are sorted by function name and then by filename to
// eliminate potential nondeterminism.
func printSource(w io.Writer, rpt *Report) error {
	o := rpt.options
	g := rpt.newGraph(nil)

	// Identify all the functions that match the regexp provided.
	// Group nodes for each matching function.
	var functions graph.Nodes
	functionNodes := make(map[string]graph.Nodes)
	for _, n := range g.Nodes {
		if !o.Symbol.MatchString(n.Info.Name) {
			continue
		}
		// Record each function once, at its first occurrence.
		if functionNodes[n.Info.Name] == nil {
			functions = append(functions, n)
		}
		functionNodes[n.Info.Name] = append(functionNodes[n.Info.Name], n)
	}
	functions.Sort(graph.NameOrder)

	sourcePath := o.SourcePath
	if sourcePath == "" {
		wd, err := os.Getwd()
		if err != nil {
			return fmt.Errorf("could not stat current dir: %v", err)
		}
		sourcePath = wd
	}
	reader := newSourceReader(sourcePath, o.TrimPath)

	fmt.Fprintf(w, "Total: %s\n", rpt.formatValue(rpt.total))
	for _, fn := range functions {
		name := fn.Info.Name

		// Identify all the source files associated to this function.
		// Group nodes for each source file.
		var sourceFiles graph.Nodes
		fileNodes := make(map[string]graph.Nodes)
		for _, n := range functionNodes[name] {
			if n.Info.File == "" {
				continue
			}
			if fileNodes[n.Info.File] == nil {
				sourceFiles = append(sourceFiles, n)
			}
			fileNodes[n.Info.File] = append(fileNodes[n.Info.File], n)
		}

		if len(sourceFiles) == 0 {
			fmt.Fprintf(w, "No source information for %s\n", name)
			continue
		}

		sourceFiles.Sort(graph.FileOrder)

		// Print each file associated with this function.
		for _, fl := range sourceFiles {
			filename := fl.Info.File
			fns := fileNodes[filename]
			flatSum, cumSum := fns.Sum()

			// The routine banner is printed before the error check so
			// it appears even when the source file cannot be read.
			fnodes, _, err := getSourceFromFile(filename, reader, fns, 0, 0)
			fmt.Fprintf(w, "ROUTINE ======================== %s in %s\n", name, filename)
			fmt.Fprintf(w, "%10s %10s (flat, cum) %s of Total\n",
				rpt.formatValue(flatSum), rpt.formatValue(cumSum),
				measurement.Percentage(cumSum, rpt.total))

			if err != nil {
				fmt.Fprintf(w, " Error: %v\n", err)
				continue
			}

			for _, fn := range fnodes {
				fmt.Fprintf(w, "%10s %10s %6d:%s\n", valueOrDot(fn.Flat, rpt), valueOrDot(fn.Cum, rpt), fn.Info.Lineno, fn.Info.Name)
			}
		}
	}
	return nil
}
+
// printWebSource prints an annotated source listing, including all
// functions with samples that match the regexp rpt.options.symbol.
func printWebSource(w io.Writer, rpt *Report, obj plugin.ObjTool) error {
	printHeader(w, rpt)
	// maxFiles == -1 means no limit on the number of files listed.
	if err := PrintWebList(w, rpt, obj, -1); err != nil {
		return err
	}
	printPageClosing(w)
	return nil
}
+
// PrintWebList prints annotated source listing of rpt to w. A
// negative maxFiles prints all matching files, sorted by file name;
// otherwise the top maxFiles files by flat value are printed.
func PrintWebList(w io.Writer, rpt *Report, obj plugin.ObjTool, maxFiles int) error {
	o := rpt.options
	g := rpt.newGraph(nil)

	// If the regexp source can be parsed as an address, also match
	// functions that land on that address.
	var address *uint64
	if hex, err := strconv.ParseUint(o.Symbol.String(), 0, 64); err == nil {
		address = &hex
	}

	sourcePath := o.SourcePath
	if sourcePath == "" {
		wd, err := os.Getwd()
		if err != nil {
			return fmt.Errorf("could not stat current dir: %v", err)
		}
		sourcePath = wd
	}
	reader := newSourceReader(sourcePath, o.TrimPath)

	type fileFunction struct {
		fileName, functionName string
	}

	// Extract interesting symbols from binary files in the profile and
	// classify samples per symbol.
	symbols := symbolsFromBinaries(rpt.prof, g, o.Symbol, address, obj)
	symNodes := nodesPerSymbol(g.Nodes, symbols)

	// Identify sources associated to a symbol by examining
	// symbol samples. Classify samples per source file.
	fileNodes := make(map[fileFunction]graph.Nodes)
	if len(symNodes) == 0 {
		// No binaries could be consulted; fall back to grouping graph
		// nodes directly by (file, function).
		for _, n := range g.Nodes {
			if n.Info.File == "" || !o.Symbol.MatchString(n.Info.Name) {
				continue
			}
			ff := fileFunction{n.Info.File, n.Info.Name}
			fileNodes[ff] = append(fileNodes[ff], n)
		}
	} else {
		for _, nodes := range symNodes {
			for _, n := range nodes {
				if n.Info.File != "" {
					ff := fileFunction{n.Info.File, n.Info.Name}
					fileNodes[ff] = append(fileNodes[ff], n)
				}
			}
		}
	}

	if len(fileNodes) == 0 {
		return fmt.Errorf("no source information for %s", o.Symbol.String())
	}

	// Build one representative node per (file, function) carrying the
	// group's summed flat/cum values.
	sourceFiles := make(graph.Nodes, 0, len(fileNodes))
	for _, nodes := range fileNodes {
		sNode := *nodes[0]
		sNode.Flat, sNode.Cum = nodes.Sum()
		sourceFiles = append(sourceFiles, &sNode)
	}

	// Limit number of files printed?
	if maxFiles < 0 {
		sourceFiles.Sort(graph.FileOrder)
	} else {
		sourceFiles.Sort(graph.FlatNameOrder)
		if maxFiles < len(sourceFiles) {
			sourceFiles = sourceFiles[:maxFiles]
		}
	}

	// Print each file associated with this function.
	for _, n := range sourceFiles {
		ff := fileFunction{n.Info.File, n.Info.Name}
		fns := fileNodes[ff]

		asm := assemblyPerSourceLine(symbols, fns, ff.fileName, obj, o.IntelSyntax)
		start, end := sourceCoordinates(asm)

		fnodes, path, err := getSourceFromFile(ff.fileName, reader, fns, start, end)
		if err != nil {
			// Source unavailable: synthesize stub lines from the
			// assembly so an annotated listing can still be shown.
			fnodes, path = getMissingFunctionSource(ff.fileName, asm, start, end)
		}

		printFunctionHeader(w, ff.functionName, path, n.Flat, n.Cum, rpt)
		for _, fn := range fnodes {
			printFunctionSourceLine(w, fn, asm[fn.Info.Lineno], reader, rpt)
		}
		printFunctionClosing(w)
	}
	return nil
}
+
+// sourceCoordinates returns the lowest and highest line numbers from
+// a set of assembly statements.
+func sourceCoordinates(asm map[int][]assemblyInstruction) (start, end int) {
+ for l := range asm {
+ if start == 0 || l < start {
+ start = l
+ }
+ if end == 0 || l > end {
+ end = l
+ }
+ }
+ return start, end
+}
+
// assemblyPerSourceLine disassembles the binary containing a symbol
// and classifies the assembly instructions according to its
// corresponding source line, annotating them with a set of samples.
func assemblyPerSourceLine(objSyms []*objSymbol, rs graph.Nodes, src string, obj plugin.ObjTool, intelSyntax bool) map[int][]assemblyInstruction {
	assembly := make(map[int][]assemblyInstruction)
	// Identify symbol to use for this collection of samples.
	o := findMatchingSymbol(objSyms, rs)
	if o == nil {
		return assembly
	}

	// Extract assembly for matched symbol
	insts, err := obj.Disasm(o.sym.File, o.sym.Start, o.sym.End, intelSyntax)
	if err != nil {
		// Best-effort: return an empty map when disassembly fails.
		return assembly
	}

	srcBase := filepath.Base(src)
	anodes := annotateAssembly(insts, rs, o.base)
	var lineno = 0
	var prevline = 0
	for _, an := range anodes {
		// Do not rely solely on the line number produced by Disasm
		// since it is not what we want in the presence of inlining.
		//
		// E.g., suppose we are printing source code for F and this
		// instruction is from H where F called G called H and both
		// of those calls were inlined. We want to use the line
		// number from F, not from H (which is what Disasm gives us).
		//
		// So find the outer-most linenumber in the source file.
		found := false
		// NOTE(review): o.file was Close()d in symbolsFromBinaries;
		// confirm SourceLine is usable after Close.
		if frames, err := o.file.SourceLine(an.address + o.base); err == nil {
			for i := len(frames) - 1; i >= 0; i-- {
				if filepath.Base(frames[i].File) == srcBase {
					// Record the inner (inlined) frames as call sites.
					for j := i - 1; j >= 0; j-- {
						an.inlineCalls = append(an.inlineCalls, callID{frames[j].File, frames[j].Line})
					}
					lineno = frames[i].Line
					found = true
					break
				}
			}
		}
		if !found && filepath.Base(an.file) == srcBase {
			lineno = an.line
		}

		if lineno != 0 {
			if lineno != prevline {
				// This instruction starts a new block
				// of contiguous instructions on this line.
				an.startsBlock = true
			}
			prevline = lineno
			assembly[lineno] = append(assembly[lineno], an)
		}
	}

	return assembly
}
+
+// findMatchingSymbol looks for the symbol that corresponds to a set
+// of samples, by comparing their addresses.
+func findMatchingSymbol(objSyms []*objSymbol, ns graph.Nodes) *objSymbol {
+ for _, n := range ns {
+ for _, o := range objSyms {
+ if filepath.Base(o.sym.File) == filepath.Base(n.Info.Objfile) &&
+ o.sym.Start <= n.Info.Address-o.base &&
+ n.Info.Address-o.base <= o.sym.End {
+ return o
+ }
+ }
+ }
+ return nil
+}
+
// printHeader prints the page header for a weblist report: the HTML
// preamble with embedded CSS and JavaScript, followed by a legend
// holding the profile labels and the total sample value.
func printHeader(w io.Writer, rpt *Report) {
	fmt.Fprintln(w, `
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>Pprof listing</title>`)
	fmt.Fprintln(w, weblistPageCSS)
	fmt.Fprintln(w, weblistPageScript)
	fmt.Fprint(w, "</head>\n<body>\n\n")

	// HTML-escape each profile label before embedding it in the page.
	var labels []string
	for _, l := range ProfileLabels(rpt) {
		labels = append(labels, template.HTMLEscapeString(l))
	}

	fmt.Fprintf(w, `<div class="legend">%s<br>Total: %s</div>`,
		strings.Join(labels, "<br>\n"),
		rpt.formatValue(rpt.total),
	)
}
+
// printFunctionHeader prints a function header for a weblist report:
// the function name, the source file path, and the opening of a <pre>
// block showing the function's flat/cum totals and the cum percentage
// of the report total. Clicking inside the <pre> toggles assembly
// display (handled by weblistPageScript).
func printFunctionHeader(w io.Writer, name, path string, flatSum, cumSum int64, rpt *Report) {
	fmt.Fprintf(w, `<h2>%s</h2><p class="filename">%s</p>
<pre onClick="pprof_toggle_asm(event)">
  Total:  %10s %10s (flat, cum) %s
`,
		template.HTMLEscapeString(name), template.HTMLEscapeString(path),
		rpt.formatValue(flatSum), rpt.formatValue(cumSum),
		measurement.Percentage(cumSum, rpt.total))
}
+
// printFunctionSourceLine prints a source line and the corresponding assembly.
//
// fn carries the source text in Info.Name and its line number in
// Info.Lineno, annotated with flat/cum values. When assembly is empty
// a single non-clickable row is printed; otherwise the source row is
// followed by a hidden <span class=asm> block, one row per
// instruction, toggled by the page script.
func printFunctionSourceLine(w io.Writer, fn *graph.Node, assembly []assemblyInstruction, reader *sourceReader, rpt *Report) {
	if len(assembly) == 0 {
		fmt.Fprintf(w,
			"<span class=line> %6d</span> <span class=nop> %10s %10s %8s %s </span>\n",
			fn.Info.Lineno,
			valueOrDot(fn.Flat, rpt), valueOrDot(fn.Cum, rpt),
			"", template.HTMLEscapeString(fn.Info.Name))
		return
	}

	fmt.Fprintf(w,
		"<span class=line> %6d</span> <span class=deadsrc> %10s %10s %8s %s </span>",
		fn.Info.Lineno,
		valueOrDot(fn.Flat, rpt), valueOrDot(fn.Cum, rpt),
		"", template.HTMLEscapeString(fn.Info.Name))
	srcIndent := indentation(fn.Info.Name)
	fmt.Fprint(w, "<span class=asm>")
	// curCalls tracks the inlined call chain printed for the previous
	// instruction so an unchanged prefix is not repeated.
	var curCalls []callID
	for i, an := range assembly {
		if an.startsBlock && i != 0 {
			// Insert a separator between discontiguous blocks.
			fmt.Fprintf(w, " %8s %28s\n", "", "⋮")
		}

		var fileline string
		if an.file != "" {
			fileline = fmt.Sprintf("%s:%d", template.HTMLEscapeString(an.file), an.line)
		}
		// Apply divisors (if any) before formatting sample values.
		flat, cum := an.flat, an.cum
		if an.flatDiv != 0 {
			flat = flat / an.flatDiv
		}
		if an.cumDiv != 0 {
			cum = cum / an.cumDiv
		}

		// Print inlined call context.
		for j, c := range an.inlineCalls {
			if j < len(curCalls) && curCalls[j] == c {
				// Skip if same as previous instruction.
				continue
			}
			curCalls = nil
			fline, ok := reader.line(c.file, c.line)
			if !ok {
				fline = ""
			}
			// Indent by four spaces per inlining level, under the source line.
			text := strings.Repeat(" ", srcIndent+4+4*j) + strings.TrimSpace(fline)
			fmt.Fprintf(w, " %8s %10s %10s %8s  <span class=inlinesrc>%s</span> <span class=unimportant>%s:%d</span>\n",
				"", "", "", "",
				template.HTMLEscapeString(fmt.Sprintf("%-80s", text)),
				template.HTMLEscapeString(filepath.Base(c.file)), c.line)
		}
		curCalls = an.inlineCalls
		text := strings.Repeat(" ", srcIndent+4+4*len(curCalls)) + an.instruction
		fmt.Fprintf(w, " %8s %10s %10s %8x: %s <span class=unimportant>%s</span>\n",
			"", valueOrDot(flat, rpt), valueOrDot(cum, rpt), an.address,
			template.HTMLEscapeString(fmt.Sprintf("%-80s", text)),
			template.HTMLEscapeString(fileline))
	}
	fmt.Fprintln(w, "</span>")
}
+
+// printFunctionClosing prints the end of a function in a weblist report.
+func printFunctionClosing(w io.Writer) {
+ fmt.Fprintln(w, "</pre>")
+}
+
+// printPageClosing prints the end of the page in a weblist report.
+func printPageClosing(w io.Writer) {
+ fmt.Fprintln(w, weblistPageClosing)
+}
+
+// getSourceFromFile collects the sources of a function from a source
+// file and annotates it with the samples in fns. Returns the sources
+// as nodes, using the info.name field to hold the source code.
+func getSourceFromFile(file string, reader *sourceReader, fns graph.Nodes, start, end int) (graph.Nodes, string, error) {
+ lineNodes := make(map[int]graph.Nodes)
+
+ // Collect source coordinates from profile.
+ const margin = 5 // Lines before first/after last sample.
+ if start == 0 {
+ if fns[0].Info.StartLine != 0 {
+ start = fns[0].Info.StartLine
+ } else {
+ start = fns[0].Info.Lineno - margin
+ }
+ } else {
+ start -= margin
+ }
+ if end == 0 {
+ end = fns[0].Info.Lineno
+ }
+ end += margin
+ for _, n := range fns {
+ lineno := n.Info.Lineno
+ nodeStart := n.Info.StartLine
+ if nodeStart == 0 {
+ nodeStart = lineno - margin
+ }
+ nodeEnd := lineno + margin
+ if nodeStart < start {
+ start = nodeStart
+ } else if nodeEnd > end {
+ end = nodeEnd
+ }
+ lineNodes[lineno] = append(lineNodes[lineno], n)
+ }
+ if start < 1 {
+ start = 1
+ }
+
+ var src graph.Nodes
+ for lineno := start; lineno <= end; lineno++ {
+ line, ok := reader.line(file, lineno)
+ if !ok {
+ break
+ }
+ flat, cum := lineNodes[lineno].Sum()
+ src = append(src, &graph.Node{
+ Info: graph.NodeInfo{
+ Name: strings.TrimRight(line, "\n"),
+ Lineno: lineno,
+ },
+ Flat: flat,
+ Cum: cum,
+ })
+ }
+ if err := reader.fileError(file); err != nil {
+ return nil, file, err
+ }
+ return src, file, nil
+}
+
+// getMissingFunctionSource creates a dummy function body to point to
+// the source file and annotates it with the samples in asm.
+func getMissingFunctionSource(filename string, asm map[int][]assemblyInstruction, start, end int) (graph.Nodes, string) {
+ var fnodes graph.Nodes
+ for i := start; i <= end; i++ {
+ insts := asm[i]
+ if len(insts) == 0 {
+ continue
+ }
+ var group assemblyInstruction
+ for _, insn := range insts {
+ group.flat += insn.flat
+ group.cum += insn.cum
+ group.flatDiv += insn.flatDiv
+ group.cumDiv += insn.cumDiv
+ }
+ flat := group.flatValue()
+ cum := group.cumValue()
+ fnodes = append(fnodes, &graph.Node{
+ Info: graph.NodeInfo{
+ Name: "???",
+ Lineno: i,
+ },
+ Flat: flat,
+ Cum: cum,
+ })
+ }
+ return fnodes, filename
+}
+
// sourceReader provides access to source code with caching of file contents.
type sourceReader struct {
	// searchPath is a filepath.ListSeparator-separated list of directories where
	// source files should be searched.
	searchPath string

	// trimPath is a filepath.ListSeparator-separated list of path prefixes to
	// trim from source paths found in the profile.
	trimPath string

	// files maps from path name to a list of lines.
	// files[*][0] is unused since line numbering starts at 1.
	files map[string][]string

	// errors collects errors encountered per file. These errors are
	// consulted before returning out of this module (see fileError).
	errors map[string]error
}
+
+func newSourceReader(searchPath, trimPath string) *sourceReader {
+ return &sourceReader{
+ searchPath,
+ trimPath,
+ make(map[string][]string),
+ make(map[string]error),
+ }
+}
+
// fileError returns the error recorded while reading path, or nil if
// the file was read successfully or has not been read yet.
func (reader *sourceReader) fileError(path string) error {
	return reader.errors[path]
}
+
+func (reader *sourceReader) line(path string, lineno int) (string, bool) {
+ lines, ok := reader.files[path]
+ if !ok {
+ // Read and cache file contents.
+ lines = []string{""} // Skip 0th line
+ f, err := openSourceFile(path, reader.searchPath, reader.trimPath)
+ if err != nil {
+ reader.errors[path] = err
+ } else {
+ s := bufio.NewScanner(f)
+ for s.Scan() {
+ lines = append(lines, s.Text())
+ }
+ f.Close()
+ if s.Err() != nil {
+ reader.errors[path] = err
+ }
+ }
+ reader.files[path] = lines
+ }
+ if lineno <= 0 || lineno >= len(lines) {
+ return "", false
+ }
+ return lines[lineno], true
+}
+
+// openSourceFile opens a source file from a name encoded in a profile. File
+// names in a profile after can be relative paths, so search them in each of
+// the paths in searchPath and their parents. In case the profile contains
+// absolute paths, additional paths may be configured to trim from the source
+// paths in the profile. This effectively turns the path into a relative path
+// searching it using searchPath as usual).
+func openSourceFile(path, searchPath, trim string) (*os.File, error) {
+ path = trimPath(path, trim, searchPath)
+ // If file is still absolute, require file to exist.
+ if filepath.IsAbs(path) {
+ f, err := os.Open(path)
+ return f, err
+ }
+ // Scan each component of the path.
+ for _, dir := range filepath.SplitList(searchPath) {
+ // Search up for every parent of each possible path.
+ for {
+ filename := filepath.Join(dir, path)
+ if f, err := os.Open(filename); err == nil {
+ return f, nil
+ }
+ parent := filepath.Dir(dir)
+ if parent == dir {
+ break
+ }
+ dir = parent
+ }
+ }
+
+ return nil, fmt.Errorf("could not find file %s on path %s", path, searchPath)
+}
+
// trimPath cleans up a path by removing prefixes that are commonly
// found on profiles plus configured prefixes.
// TODO(aalexand): Consider optimizing out the redundant work done in this
// function if it proves to matter.
func trimPath(path, trimPath, searchPath string) string {
	// path is kept intact: slash-normalized copies are used only for
	// matching, so the returned value is always a slice of the original.
	sPath, searchPath := filepath.ToSlash(path), filepath.ToSlash(searchPath)
	if trimPath == "" {
		// No explicit trim prefixes configured: guess heuristically.
		// Look for the basename of each search directory inside the
		// original path and, if found, drop everything up to and
		// including it. E.g. with path
		// "/some/remote/path/my-project/foo/bar.c" and search directory
		// "/my/local/path/my-project", return "foo/bar.c", which the
		// caller can then locate under the search directory.
		for _, dir := range filepath.SplitList(searchPath) {
			marker := "/" + filepath.Base(dir) + "/"
			if i := strings.Index(sPath, marker); i >= 0 {
				return path[i+len(marker):]
			}
		}
	}
	// Strip any configured prefix, plus prefixes commonly produced by
	// profilers that record the process working directory.
	prefixes := append(filepath.SplitList(filepath.ToSlash(trimPath)), "/proc/self/cwd/./", "/proc/self/cwd/")
	for _, prefix := range prefixes {
		if !strings.HasSuffix(prefix, "/") {
			prefix += "/"
		}
		if strings.HasPrefix(sPath, prefix) {
			return path[len(prefix):]
		}
	}
	return path
}
+
// indentation returns the display column at which the first
// non-whitespace character of line appears, expanding tabs to the
// next multiple of 8 columns.
func indentation(line string) int {
	col := 0
scan:
	for _, r := range line {
		switch r {
		case ' ':
			col++
		case '\t':
			// Advance to the next tab stop (the next multiple of 8).
			col = (col/8 + 1) * 8
		default:
			break scan
		}
	}
	return col
}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/report/source_html.go b/src/cmd/vendor/github.com/google/pprof/internal/report/source_html.go
new file mode 100644
index 0000000..02a6d77
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/report/source_html.go
@@ -0,0 +1,84 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package report
+
+import (
+ "html/template"
+)
+
+// AddSourceTemplates adds templates used by PrintWebList to t.
+func AddSourceTemplates(t *template.Template) {
+ template.Must(t.Parse(`{{define "weblistcss"}}` + weblistPageCSS + `{{end}}`))
+ template.Must(t.Parse(`{{define "weblistjs"}}` + weblistPageScript + `{{end}}`))
+}
+
// weblistPageCSS is the stylesheet embedded in weblist report pages.
// The .asm block starts hidden (display: none) and is toggled by
// pprof_toggle_asm in weblistPageScript.
const weblistPageCSS = `<style type="text/css">
body {
font-family: sans-serif;
}
h1 {
  font-size: 1.5em;
  margin-bottom: 4px;
}
.legend {
  font-size: 1.25em;
}
.line, .nop, .unimportant {
  color: #aaaaaa;
}
.inlinesrc {
  color: #000066;
}
.deadsrc {
cursor: pointer;
}
.deadsrc:hover {
background-color: #eeeeee;
}
.livesrc {
color: #0000ff;
cursor: pointer;
}
.livesrc:hover {
background-color: #eeeeee;
}
.asm {
color: #008800;
display: none;
}
</style>`
+
// weblistPageScript is the JavaScript embedded in weblist report
// pages. pprof_toggle_asm shows or hides the assembly listing
// (the sibling element with class "asm") following the clicked
// source line.
const weblistPageScript = `<script type="text/javascript">
function pprof_toggle_asm(e) {
  var target;
  if (!e) e = window.event;
  if (e.target) target = e.target;
  else if (e.srcElement) target = e.srcElement;

  if (target) {
    var asm = target.nextSibling;
    if (asm && asm.className == "asm") {
      asm.style.display = (asm.style.display == "block" ? "" : "block");
      e.preventDefault();
      return false;
    }
  }
}
</script>`
+
// weblistPageClosing closes the body and html tags opened by printHeader.
const weblistPageClosing = `
</body>
</html>
`
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/symbolizer/symbolizer.go b/src/cmd/vendor/github.com/google/pprof/internal/symbolizer/symbolizer.go
new file mode 100644
index 0000000..d741e7a
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/symbolizer/symbolizer.go
@@ -0,0 +1,361 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package symbolizer provides a routine to populate a profile with
+// symbol, file and line number information. It relies on the
+// addr2liner and demangle packages to do the actual work.
+package symbolizer
+
+import (
+ "fmt"
+ "io/ioutil"
+ "net/http"
+ "net/url"
+ "path/filepath"
+ "strings"
+
+ "github.com/google/pprof/internal/binutils"
+ "github.com/google/pprof/internal/plugin"
+ "github.com/google/pprof/internal/symbolz"
+ "github.com/google/pprof/profile"
+ "github.com/ianlancetaylor/demangle"
+)
+
// Symbolizer implements the plugin.Symbolize interface.
type Symbolizer struct {
	Obj       plugin.ObjTool    // tool used to open and inspect local binaries
	UI        plugin.UI         // destination for warnings and errors
	Transport http.RoundTripper // transport for remote (symbolz) requests
}
+
// Test taps for dependency injection: tests replace these variables to
// stub out the symbolz, local-symbolization, and demangling steps.
var symbolzSymbolize = symbolz.Symbolize
var localSymbolize = doLocalSymbolize
var demangleFunction = Demangle
+
// Symbolize attempts to symbolize profile p. First uses binutils on
// local binaries; if the source is a URL it attempts to get any
// missed entries using symbolz.
//
// mode is a colon-separated option list of the form
// [local|fastlocal|remote|none][:force][:demangle=none|full|templates|default].
func (s *Symbolizer) Symbolize(mode string, sources plugin.MappingSources, p *profile.Profile) error {
	remote, local, fast, force, demanglerMode := true, true, false, false, ""
	for _, o := range strings.Split(strings.ToLower(mode), ":") {
		switch o {
		case "":
			continue
		case "none", "no":
			return nil
		case "local":
			remote, local = false, true
		case "fastlocal":
			remote, local, fast = false, true, true
		case "remote":
			remote, local = true, false
		case "force":
			force = true
		default:
			// Explicit demangle= options imply force so existing names
			// are re-demangled under the new mode.
			switch d := strings.TrimPrefix(o, "demangle="); d {
			case "full", "none", "templates":
				demanglerMode = d
				force = true
				continue
			case "default":
				continue
			}
			// Unrecognized options are reported but do not abort.
			s.UI.PrintErr("ignoring unrecognized symbolization option: " + mode)
			s.UI.PrintErr("expecting -symbolize=[local|fastlocal|remote|none][:force][:demangle=[none|full|templates|default]")
		}
	}

	var err error
	if local {
		// Symbolize locally using binutils. Failure here is not fatal:
		// remote symbolization below may still fill in missing entries.
		if err = localSymbolize(p, fast, force, s.Obj, s.UI); err != nil {
			s.UI.PrintErr("local symbolization: " + err.Error())
		}
	}
	if remote {
		post := func(source, post string) ([]byte, error) {
			return postURL(source, post, s.Transport)
		}
		if err = symbolzSymbolize(p, force, sources, post, s.UI); err != nil {
			return err // Ran out of options.
		}
	}

	demangleFunction(p, force, demanglerMode)
	return nil
}
+
+// postURL issues a POST to a URL over HTTP.
+func postURL(source, post string, tr http.RoundTripper) ([]byte, error) {
+ client := &http.Client{
+ Transport: tr,
+ }
+ resp, err := client.Post(source, "application/octet-stream", strings.NewReader(post))
+ if err != nil {
+ return nil, fmt.Errorf("http post %s: %v", source, err)
+ }
+ defer resp.Body.Close()
+ if resp.StatusCode != http.StatusOK {
+ return nil, fmt.Errorf("http post %s: %v", source, statusCodeError(resp))
+ }
+ return ioutil.ReadAll(resp.Body)
+}
+
+func statusCodeError(resp *http.Response) error {
+ if resp.Header.Get("X-Go-Pprof") != "" && strings.Contains(resp.Header.Get("Content-Type"), "text/plain") {
+ // error is from pprof endpoint
+ if body, err := ioutil.ReadAll(resp.Body); err == nil {
+ return fmt.Errorf("server response: %s - %s", resp.Status, body)
+ }
+ }
+ return fmt.Errorf("server response: %s", resp.Status)
+}
+
// doLocalSymbolize adds symbol and line number information to all locations
// in a profile using the local object tool obj. fast enables binutils
// fast symbolization; force re-symbolizes mappings that already carry
// symbol information.
func doLocalSymbolize(prof *profile.Profile, fast, force bool, obj plugin.ObjTool, ui plugin.UI) error {
	if fast {
		if bu, ok := obj.(*binutils.Binutils); ok {
			bu.SetFastSymbolization(true)
		}
	}

	mt, err := newMapping(prof, obj, ui, force)
	if err != nil {
		return err
	}
	defer mt.close()

	// functions dedupes identical (name, system name, filename) entries
	// so each distinct function is appended to the profile only once.
	functions := make(map[profile.Function]*profile.Function)
	for _, l := range mt.prof.Location {
		m := l.Mapping
		segment := mt.segments[m]
		if segment == nil {
			// Nothing to do.
			continue
		}

		stack, err := segment.SourceLine(l.Address)
		if err != nil || len(stack) == 0 {
			// No answers from addr2line.
			continue
		}

		l.Line = make([]profile.Line, len(stack))
		l.IsFolded = false
		for i, frame := range stack {
			if frame.Func != "" {
				m.HasFunctions = true
			}
			if frame.File != "" {
				m.HasFilenames = true
			}
			if frame.Line != 0 {
				m.HasLineNumbers = true
			}
			f := &profile.Function{
				Name:       frame.Func,
				SystemName: frame.Func,
				Filename:   frame.File,
			}
			if fp := functions[*f]; fp != nil {
				f = fp
			} else {
				functions[*f] = f
				f.ID = uint64(len(mt.prof.Function)) + 1
				mt.prof.Function = append(mt.prof.Function, f)
			}
			l.Line[i] = profile.Line{
				Function: f,
				Line:     int64(frame.Line),
			}
		}

		if len(stack) > 0 {
			// NOTE(review): this condition is always true here (empty
			// stacks were skipped above), so every symbolized mapping is
			// marked as having inline frames; presumably len(stack) > 1
			// was intended — confirm against upstream pprof.
			m.HasInlineFrames = true
		}
	}

	return nil
}
+
// Demangle updates the function names in a profile with demangled C++
// names, simplified according to demanglerMode. If force is set,
// overwrite any names that appear already demangled.
//
// demanglerMode is one of:
//
//	""          - no parameters, no templates, no return type
//	"templates" - no parameters, no return type
//	"full"      - full demangling (clone suffixes still dropped)
//	"none"      - no demangling
func Demangle(prof *profile.Profile, force bool, demanglerMode string) {
	if force {
		// Remove the current demangled names to force demangling
		for _, f := range prof.Function {
			if f.Name != "" && f.SystemName != "" {
				f.Name = f.SystemName
			}
		}
	}

	var options []demangle.Option
	switch demanglerMode {
	case "": // demangled, simplified: no parameters, no templates, no return type
		options = []demangle.Option{demangle.NoParams, demangle.NoTemplateParams}
	case "templates": // demangled, simplified: no parameters, no return type
		options = []demangle.Option{demangle.NoParams}
	case "full":
		options = []demangle.Option{demangle.NoClones}
	case "none": // no demangling
		return
	}

	// Copy the options because they may be updated by the call.
	o := make([]demangle.Option, len(options))
	for _, fn := range prof.Function {
		if fn.Name != "" && fn.SystemName != fn.Name {
			continue // Already demangled.
		}
		copy(o, options)
		if demangled := demangle.Filter(fn.SystemName, o...); demangled != fn.SystemName {
			fn.Name = demangled
			continue
		}
		// Could not demangle. Apply heuristics in case the name is
		// already demangled.
		name := fn.SystemName
		if looksLikeDemangledCPlusPlus(name) {
			// Strip parameter lists and template arguments to match the
			// simplification the chosen mode would have applied.
			if demanglerMode == "" || demanglerMode == "templates" {
				name = removeMatching(name, '(', ')')
			}
			if demanglerMode == "" {
				name = removeMatching(name, '<', '>')
			}
		}
		fn.Name = name
	}
}
+
// looksLikeDemangledCPlusPlus is a heuristic to decide if a name is
// the result of demangling C++. If so, further heuristics will be
// applied to simplify the name.
func looksLikeDemangledCPlusPlus(demangled string) bool {
	// Java synthesizes names of the form "class.<init>"; not C++.
	if strings.Contains(demangled, ".<") {
		return false
	}
	if strings.Contains(demangled, "::") {
		return true
	}
	return strings.ContainsAny(demangled, "<>[]")
}
+
// removeMatching removes nested instances of start..end from name.
// Unbalanced input (a closer with no matching opener) is returned
// unchanged.
func removeMatching(name string, start, end byte) string {
	depth := 0  // current nesting level
	opener := 0 // index of the outermost unclosed start byte
	for i := 0; i < len(name); i++ {
		switch name[i] {
		case start:
			if depth++; depth == 1 {
				opener = i
			}
		case end:
			if depth--; depth < 0 {
				return name // Mismatch, abort.
			}
			if depth == 0 {
				// Drop the completed outermost group and rescan from
				// the position where it began.
				name = name[:opener] + name[i+1:]
				i = opener - 1
			}
		}
	}
	return name
}
+
// newMapping creates a mappingTable for a profile, opening an object
// file for every mapping that has locations and still needs
// symbolization. Mappings that cannot be symbolized (no file name,
// system mappings, URL sources, build ID mismatch) are reported via ui
// and skipped.
func newMapping(prof *profile.Profile, obj plugin.ObjTool, ui plugin.UI, force bool) (*mappingTable, error) {
	mt := &mappingTable{
		prof:     prof,
		segments: make(map[*profile.Mapping]plugin.ObjFile),
	}

	// Identify used mappings
	mappings := make(map[*profile.Mapping]bool)
	for _, l := range prof.Location {
		mappings[l.Mapping] = true
	}

	missingBinaries := false
	for midx, m := range prof.Mapping {
		if !mappings[m] {
			continue
		}

		// Do not attempt to re-symbolize a mapping that has already been symbolized.
		if !force && (m.HasFunctions || m.HasFilenames || m.HasLineNumbers) {
			continue
		}

		if m.File == "" {
			// The first mapping is the main binary; warn specifically.
			if midx == 0 {
				ui.PrintErr("Main binary filename not available.")
				continue
			}
			missingBinaries = true
			continue
		}

		// Skip well-known system mappings
		if m.Unsymbolizable() {
			continue
		}

		// Skip mappings pointing to a source URL
		if m.BuildID == "" {
			if u, err := url.Parse(m.File); err == nil && u.IsAbs() && strings.Contains(strings.ToLower(u.Scheme), "http") {
				continue
			}
		}

		name := filepath.Base(m.File)
		f, err := obj.Open(m.File, m.Start, m.Limit, m.Offset)
		if err != nil {
			ui.PrintErr("Local symbolization failed for ", name, ": ", err)
			missingBinaries = true
			continue
		}
		// Refuse a binary whose build ID contradicts the profile's.
		if fid := f.BuildID(); m.BuildID != "" && fid != "" && fid != m.BuildID {
			ui.PrintErr("Local symbolization failed for ", name, ": build ID mismatch")
			f.Close()
			continue
		}

		mt.segments[m] = f
	}
	if missingBinaries {
		ui.PrintErr("Some binary filenames not available. Symbolization may be incomplete.\n" +
			"Try setting PPROF_BINARY_PATH to the search path for local binaries.")
	}
	return mt, nil
}
+
// mappingTable contains the mechanisms for symbolization of a
// profile: the profile being symbolized plus an open object file for
// each mapping that could be located (see newMapping).
type mappingTable struct {
	prof     *profile.Profile
	segments map[*profile.Mapping]plugin.ObjFile
}
+
// close releases any external processes being used for the mapping
// by closing every open object file in the table.
func (mt *mappingTable) close() {
	for _, segment := range mt.segments {
		segment.Close()
	}
}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/symbolz/symbolz.go b/src/cmd/vendor/github.com/google/pprof/internal/symbolz/symbolz.go
new file mode 100644
index 0000000..7be3048
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/symbolz/symbolz.go
@@ -0,0 +1,200 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package symbolz symbolizes a profile using the output from the symbolz
+// service.
+package symbolz
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "net/url"
+ "path"
+ "regexp"
+ "strconv"
+ "strings"
+
+ "github.com/google/pprof/internal/plugin"
+ "github.com/google/pprof/profile"
+)
+
var (
	// symbolzRE matches one line of symbolz output: a hex address
	// followed by whitespace and the symbol name.
	symbolzRE = regexp.MustCompile(`(0x[[:xdigit:]]+)\s+(.*)`)
)
+
// Symbolize symbolizes profile p by parsing data returned by a symbolz
// handler. syms receives the symbolz query (hex addresses separated by '+')
// and returns the symbolz output in a string. If force is false, it will only
// symbolize locations from mappings not already marked as HasFunctions. Never
// attempts symbolization of addresses from unsymbolizable system
// mappings as those may look negative - e.g. "[vsyscall]".
func Symbolize(p *profile.Profile, force bool, sources plugin.MappingSources, syms func(string, string) ([]byte, error), ui plugin.UI) error {
	for _, m := range p.Mapping {
		if !force && m.HasFunctions {
			// Only check for HasFunctions as symbolz only populates function names.
			continue
		}
		// Skip well-known system mappings.
		if m.Unsymbolizable() {
			continue
		}
		// Gather candidate sources by file name and, if available, build ID.
		mappingSources := sources[m.File]
		if m.BuildID != "" {
			mappingSources = append(mappingSources, sources[m.BuildID]...)
		}
		// Use the first source that maps to a symbolz endpoint. The
		// offset compensates for address normalization applied when
		// profiles were merged.
		for _, source := range mappingSources {
			if symz := symbolz(source.Source); symz != "" {
				if err := symbolizeMapping(symz, int64(source.Start)-int64(m.Start), syms, m, p); err != nil {
					return err
				}
				m.HasFunctions = true
				break
			}
		}
	}

	return nil
}
+
// hasGperftoolsSuffix checks whether path ends with one of the suffixes listed in
// pprof_remote_servers.html from the gperftools distribution.
func hasGperftoolsSuffix(path string) bool {
	for _, suffix := range []string{
		"/pprof/heap",
		"/pprof/growth",
		"/pprof/profile",
		"/pprof/pmuprofile",
		"/pprof/contention",
	} {
		if strings.HasSuffix(path, suffix) {
			return true
		}
	}
	return false
}
+
+// symbolz returns the corresponding symbolz source for a profile URL.
+func symbolz(source string) string {
+ if url, err := url.Parse(source); err == nil && url.Host != "" {
+ // All paths in the net/http/pprof Go package contain /debug/pprof/
+ if strings.Contains(url.Path, "/debug/pprof/") || hasGperftoolsSuffix(url.Path) {
+ url.Path = path.Clean(url.Path + "/../symbol")
+ } else {
+ url.Path = "/symbolz"
+ }
+ url.RawQuery = ""
+ return url.String()
+ }
+
+ return ""
+}
+
// symbolizeMapping symbolizes locations belonging to a Mapping by querying
// a symbolz handler. An offset is applied to all addresses to take care of
// normalization occurred for merged Mappings.
func symbolizeMapping(source string, offset int64, syms func(string, string) ([]byte, error), m *profile.Mapping, p *profile.Profile) error {
	// Construct query of addresses to symbolize: only locations in m
	// that have an address but no line information yet.
	var a []string
	for _, l := range p.Location {
		if l.Mapping == m && l.Address != 0 && len(l.Line) == 0 {
			// Compensate for normalization.
			addr, overflow := adjust(l.Address, offset)
			if overflow {
				return fmt.Errorf("cannot adjust address %d by %d, it would overflow (mapping %v)", l.Address, offset, l.Mapping)
			}
			a = append(a, fmt.Sprintf("%#x", addr))
		}
	}

	if len(a) == 0 {
		// No addresses to symbolize.
		return nil
	}

	// lines maps a (profile-space) address to the symbolized line;
	// functions dedupes symbol names returned by the handler.
	lines := make(map[uint64]profile.Line)
	functions := make(map[string]*profile.Function)

	b, err := syms(source, strings.Join(a, "+"))
	if err != nil {
		return err
	}

	// Parse the response one "0xADDR NAME" line at a time.
	buf := bytes.NewBuffer(b)
	for {
		l, err := buf.ReadString('\n')

		if err != nil {
			if err == io.EOF {
				break
			}
			return err
		}

		if symbol := symbolzRE.FindStringSubmatch(l); len(symbol) == 3 {
			origAddr, err := strconv.ParseUint(symbol[1], 0, 64)
			if err != nil {
				return fmt.Errorf("unexpected parse failure %s: %v", symbol[1], err)
			}
			// Reapply offset expected by the profile.
			addr, overflow := adjust(origAddr, -offset)
			if overflow {
				return fmt.Errorf("cannot adjust symbolz address %d by %d, it would overflow", origAddr, -offset)
			}

			name := symbol[2]
			fn := functions[name]
			if fn == nil {
				fn = &profile.Function{
					ID:         uint64(len(p.Function) + 1),
					Name:       name,
					SystemName: name,
				}
				functions[name] = fn
				p.Function = append(p.Function, fn)
			}

			lines[addr] = profile.Line{Function: fn}
		}
	}

	// Attach the symbolized lines to the matching locations.
	for _, l := range p.Location {
		if l.Mapping != m {
			continue
		}
		if line, ok := lines[l.Address]; ok {
			l.Line = []profile.Line{line}
		}
	}

	return nil
}
+
// adjust shifts the specified address by the signed offset. It returns the
// adjusted address. It signals that the address cannot be adjusted without an
// overflow by returning true in the second return value.
func adjust(addr uint64, offset int64) (uint64, bool) {
	sum := addr + uint64(offset) // two's-complement wraparound
	// A negative offset must strictly decrease the address; a
	// non-negative offset must not decrease it.
	if overflowed := (offset < 0 && sum >= addr) || (offset >= 0 && sum < addr); overflowed {
		return 0, true
	}
	return sum, false
}
diff --git a/src/cmd/vendor/github.com/google/pprof/internal/transport/transport.go b/src/cmd/vendor/github.com/google/pprof/internal/transport/transport.go
new file mode 100644
index 0000000..b5fb1dd
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/internal/transport/transport.go
@@ -0,0 +1,131 @@
+// Copyright 2018 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package transport provides a mechanism to send requests with https cert,
+// key, and CA.
+package transport
+
+import (
+ "crypto/tls"
+ "crypto/x509"
+ "fmt"
+ "io/ioutil"
+ "net/http"
+ "sync"
+
+ "github.com/google/pprof/internal/plugin"
+)
+
// transport is an http.RoundTripper that makes HTTPS requests using the
// TLS client certificate, private key and CA bundle supplied via the
// -tls_cert, -tls_key and -tls_ca command-line flags.
type transport struct {
	cert       *string           // -tls_cert flag value; nil when no flagset was given to New
	key        *string           // -tls_key flag value
	ca         *string           // -tls_ca flag value
	caCertPool *x509.CertPool    // built lazily from ca by initialize
	certs      []tls.Certificate // built lazily from cert/key by initialize
	initOnce   sync.Once         // ensures initialize runs exactly once
	initErr    error             // error captured from the single initialize call
}
+
// extraUsage documents the TLS flags in pprof's -help output; it is
// registered via flagset.AddExtraUsage in New.
const extraUsage = ` -tls_cert TLS client certificate file for fetching profile and symbols
 -tls_key TLS private key file for fetching profile and symbols
 -tls_ca TLS CA certs file for fetching profile and symbols`
+
+// New returns a round tripper for making requests with the
+// specified cert, key, and ca. The flags tls_cert, tls_key, and tls_ca are
+// added to the flagset to allow a user to specify the cert, key, and ca. If
+// the flagset is nil, no flags will be added, and users will not be able to
+// use these flags.
+func New(flagset plugin.FlagSet) http.RoundTripper {
+ if flagset == nil {
+ return &transport{}
+ }
+ flagset.AddExtraUsage(extraUsage)
+ return &transport{
+ cert: flagset.String("tls_cert", "", "TLS client certificate file for fetching profile and symbols"),
+ key: flagset.String("tls_key", "", "TLS private key file for fetching profile and symbols"),
+ ca: flagset.String("tls_ca", "", "TLS CA certs file for fetching profile and symbols"),
+ }
+}
+
+// initialize uses the cert, key, and ca to initialize the certs
+// to use these when making requests.
+func (tr *transport) initialize() error {
+ var cert, key, ca string
+ if tr.cert != nil {
+ cert = *tr.cert
+ }
+ if tr.key != nil {
+ key = *tr.key
+ }
+ if tr.ca != nil {
+ ca = *tr.ca
+ }
+
+ if cert != "" && key != "" {
+ tlsCert, err := tls.LoadX509KeyPair(cert, key)
+ if err != nil {
+ return fmt.Errorf("could not load certificate/key pair specified by -tls_cert and -tls_key: %v", err)
+ }
+ tr.certs = []tls.Certificate{tlsCert}
+ } else if cert == "" && key != "" {
+ return fmt.Errorf("-tls_key is specified, so -tls_cert must also be specified")
+ } else if cert != "" && key == "" {
+ return fmt.Errorf("-tls_cert is specified, so -tls_key must also be specified")
+ }
+
+ if ca != "" {
+ caCertPool := x509.NewCertPool()
+ caCert, err := ioutil.ReadFile(ca)
+ if err != nil {
+ return fmt.Errorf("could not load CA specified by -tls_ca: %v", err)
+ }
+ caCertPool.AppendCertsFromPEM(caCert)
+ tr.caCertPool = caCertPool
+ }
+
+ return nil
+}
+
+// RoundTrip executes a single HTTP transaction, returning
+// a Response for the provided Request.
+func (tr *transport) RoundTrip(req *http.Request) (*http.Response, error) {
+ tr.initOnce.Do(func() {
+ tr.initErr = tr.initialize()
+ })
+ if tr.initErr != nil {
+ return nil, tr.initErr
+ }
+
+ tlsConfig := &tls.Config{
+ RootCAs: tr.caCertPool,
+ Certificates: tr.certs,
+ }
+
+ if req.URL.Scheme == "https+insecure" {
+ // Make shallow copy of request, and req.URL, so the request's URL can be
+ // modified.
+ r := *req
+ *r.URL = *req.URL
+ req = &r
+ tlsConfig.InsecureSkipVerify = true
+ req.URL.Scheme = "https"
+ }
+
+ transport := http.Transport{
+ Proxy: http.ProxyFromEnvironment,
+ TLSClientConfig: tlsConfig,
+ }
+
+ return transport.RoundTrip(req)
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/profile/encode.go b/src/cmd/vendor/github.com/google/pprof/profile/encode.go
new file mode 100644
index 0000000..1e84c72
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/profile/encode.go
@@ -0,0 +1,567 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package profile
+
+import (
+ "errors"
+ "sort"
+)
+
// decoder returns the field-decoder table used by decodeMessage to
// parse a serialized Profile.
func (p *Profile) decoder() []decoder {
	return profileDecoder
}
+
+// preEncode populates the unexported fields to be used by encode
+// (with suffix X) from the corresponding exported fields. The
+// exported fields are cleared up to facilitate testing.
func (p *Profile) preEncode() {
	// strings interns every string used by the profile; index 0 is
	// always the empty string, as required by the proto format.
	strings := make(map[string]int)
	addString(strings, "")

	for _, st := range p.SampleType {
		st.typeX = addString(strings, st.Type)
		st.unitX = addString(strings, st.Unit)
	}

	for _, s := range p.Sample {
		s.labelX = nil
		// String labels are emitted sorted by key so encoding is
		// deterministic for a given profile.
		var keys []string
		for k := range s.Label {
			keys = append(keys, k)
		}
		sort.Strings(keys)
		for _, k := range keys {
			vs := s.Label[k]
			for _, v := range vs {
				s.labelX = append(s.labelX,
					label{
						keyX: addString(strings, k),
						strX: addString(strings, v),
					},
				)
			}
		}
		// Numeric labels likewise, with each value carrying its unit
		// (when the parallel NumUnit slice provides one).
		var numKeys []string
		for k := range s.NumLabel {
			numKeys = append(numKeys, k)
		}
		sort.Strings(numKeys)
		for _, k := range numKeys {
			keyX := addString(strings, k)
			vs := s.NumLabel[k]
			units := s.NumUnit[k]
			for i, v := range vs {
				var unitX int64
				if len(units) != 0 {
					unitX = addString(strings, units[i])
				}
				s.labelX = append(s.labelX,
					label{
						keyX:  keyX,
						numX:  v,
						unitX: unitX,
					},
				)
			}
		}
		// Locations are referenced by ID in the encoded form.
		s.locationIDX = make([]uint64, len(s.Location))
		for i, loc := range s.Location {
			s.locationIDX[i] = loc.ID
		}
	}

	for _, m := range p.Mapping {
		m.fileX = addString(strings, m.File)
		m.buildIDX = addString(strings, m.BuildID)
	}

	for _, l := range p.Location {
		for i, ln := range l.Line {
			if ln.Function != nil {
				l.Line[i].functionIDX = ln.Function.ID
			} else {
				l.Line[i].functionIDX = 0
			}
		}
		if l.Mapping != nil {
			l.mappingIDX = l.Mapping.ID
		} else {
			l.mappingIDX = 0
		}
	}
	for _, f := range p.Function {
		f.nameX = addString(strings, f.Name)
		f.systemNameX = addString(strings, f.SystemName)
		f.filenameX = addString(strings, f.Filename)
	}

	p.dropFramesX = addString(strings, p.DropFrames)
	p.keepFramesX = addString(strings, p.KeepFrames)

	if pt := p.PeriodType; pt != nil {
		pt.typeX = addString(strings, pt.Type)
		pt.unitX = addString(strings, pt.Unit)
	}

	p.commentX = nil
	for _, c := range p.Comments {
		p.commentX = append(p.commentX, addString(strings, c))
	}

	p.defaultSampleTypeX = addString(strings, p.DefaultSampleType)

	// Flatten the interning map into the positional string table.
	p.stringTable = make([]string, len(strings))
	for s, i := range strings {
		p.stringTable[i] = s
	}
}
+
// encode serializes p into b following the profile.proto field
// numbering. preEncode must have been called first to populate the
// unexported (suffix-X) fields and the string table.
func (p *Profile) encode(b *buffer) {
	for _, x := range p.SampleType {
		encodeMessage(b, 1, x)
	}
	for _, x := range p.Sample {
		encodeMessage(b, 2, x)
	}
	for _, x := range p.Mapping {
		encodeMessage(b, 3, x)
	}
	for _, x := range p.Location {
		encodeMessage(b, 4, x)
	}
	for _, x := range p.Function {
		encodeMessage(b, 5, x)
	}
	encodeStrings(b, 6, p.stringTable)
	encodeInt64Opt(b, 7, p.dropFramesX)
	encodeInt64Opt(b, 8, p.keepFramesX)
	encodeInt64Opt(b, 9, p.TimeNanos)
	encodeInt64Opt(b, 10, p.DurationNanos)
	// Only emit the period type when it carries information.
	if pt := p.PeriodType; pt != nil && (pt.typeX != 0 || pt.unitX != 0) {
		encodeMessage(b, 11, p.PeriodType)
	}
	encodeInt64Opt(b, 12, p.Period)
	encodeInt64s(b, 13, p.commentX)
	encodeInt64(b, 14, p.defaultSampleTypeX)
}
+
// profileDecoder maps profile.proto field numbers (the slice index) to
// handlers that decode the corresponding field into a Profile.
var profileDecoder = []decoder{
	nil, // 0
	// repeated ValueType sample_type = 1
	func(b *buffer, m message) error {
		x := new(ValueType)
		pp := m.(*Profile)
		pp.SampleType = append(pp.SampleType, x)
		return decodeMessage(b, x)
	},
	// repeated Sample sample = 2
	func(b *buffer, m message) error {
		x := new(Sample)
		pp := m.(*Profile)
		pp.Sample = append(pp.Sample, x)
		return decodeMessage(b, x)
	},
	// repeated Mapping mapping = 3
	func(b *buffer, m message) error {
		x := new(Mapping)
		pp := m.(*Profile)
		pp.Mapping = append(pp.Mapping, x)
		return decodeMessage(b, x)
	},
	// repeated Location location = 4
	func(b *buffer, m message) error {
		x := new(Location)
		x.Line = make([]Line, 0, 8) // Pre-allocate Line buffer
		pp := m.(*Profile)
		pp.Location = append(pp.Location, x)
		err := decodeMessage(b, x)
		var tmp []Line
		x.Line = append(tmp, x.Line...) // Shrink to allocated size
		return err
	},
	// repeated Function function = 5
	func(b *buffer, m message) error {
		x := new(Function)
		pp := m.(*Profile)
		pp.Function = append(pp.Function, x)
		return decodeMessage(b, x)
	},
	// repeated string string_table = 6
	func(b *buffer, m message) error {
		err := decodeStrings(b, &m.(*Profile).stringTable)
		if err != nil {
			return err
		}
		// The proto format requires entry 0 to be the empty string.
		if m.(*Profile).stringTable[0] != "" {
			return errors.New("string_table[0] must be ''")
		}
		return nil
	},
	// int64 drop_frames = 7
	func(b *buffer, m message) error { return decodeInt64(b, &m.(*Profile).dropFramesX) },
	// int64 keep_frames = 8
	func(b *buffer, m message) error { return decodeInt64(b, &m.(*Profile).keepFramesX) },
	// int64 time_nanos = 9
	func(b *buffer, m message) error {
		// A second time_nanos indicates concatenated profiles, which
		// must be split before parsing.
		if m.(*Profile).TimeNanos != 0 {
			return errConcatProfile
		}
		return decodeInt64(b, &m.(*Profile).TimeNanos)
	},
	// int64 duration_nanos = 10
	func(b *buffer, m message) error { return decodeInt64(b, &m.(*Profile).DurationNanos) },
	// ValueType period_type = 11
	func(b *buffer, m message) error {
		x := new(ValueType)
		pp := m.(*Profile)
		pp.PeriodType = x
		return decodeMessage(b, x)
	},
	// int64 period = 12
	func(b *buffer, m message) error { return decodeInt64(b, &m.(*Profile).Period) },
	// repeated int64 comment = 13
	func(b *buffer, m message) error { return decodeInt64s(b, &m.(*Profile).commentX) },
	// int64 defaultSampleType = 14
	func(b *buffer, m message) error { return decodeInt64(b, &m.(*Profile).defaultSampleTypeX) },
}
+
+// postDecode takes the unexported fields populated by decode (with
+// suffix X) and populates the corresponding exported fields.
+// The unexported fields are cleared up to facilitate testing.
func (p *Profile) postDecode() error {
	var err error
	// IDs are typically small and dense, so lookups use a dense slice
	// first and fall back to a map only for out-of-range IDs.
	mappings := make(map[uint64]*Mapping, len(p.Mapping))
	mappingIds := make([]*Mapping, len(p.Mapping)+1)
	for _, m := range p.Mapping {
		m.File, err = getString(p.stringTable, &m.fileX, err)
		m.BuildID, err = getString(p.stringTable, &m.buildIDX, err)
		if m.ID < uint64(len(mappingIds)) {
			mappingIds[m.ID] = m
		} else {
			mappings[m.ID] = m
		}
	}

	functions := make(map[uint64]*Function, len(p.Function))
	functionIds := make([]*Function, len(p.Function)+1)
	for _, f := range p.Function {
		f.Name, err = getString(p.stringTable, &f.nameX, err)
		f.SystemName, err = getString(p.stringTable, &f.systemNameX, err)
		f.Filename, err = getString(p.stringTable, &f.filenameX, err)
		if f.ID < uint64(len(functionIds)) {
			functionIds[f.ID] = f
		} else {
			functions[f.ID] = f
		}
	}

	locations := make(map[uint64]*Location, len(p.Location))
	locationIds := make([]*Location, len(p.Location)+1)
	for _, l := range p.Location {
		// Resolve the mapping and function ID references recorded by
		// decode into pointers, clearing the X fields as we go.
		if id := l.mappingIDX; id < uint64(len(mappingIds)) {
			l.Mapping = mappingIds[id]
		} else {
			l.Mapping = mappings[id]
		}
		l.mappingIDX = 0
		for i, ln := range l.Line {
			if id := ln.functionIDX; id != 0 {
				l.Line[i].functionIDX = 0
				if id < uint64(len(functionIds)) {
					l.Line[i].Function = functionIds[id]
				} else {
					l.Line[i].Function = functions[id]
				}
			}
		}
		if l.ID < uint64(len(locationIds)) {
			locationIds[l.ID] = l
		} else {
			locations[l.ID] = l
		}
	}

	for _, st := range p.SampleType {
		st.Type, err = getString(p.stringTable, &st.typeX, err)
		st.Unit, err = getString(p.stringTable, &st.unitX, err)
	}

	for _, s := range p.Sample {
		// Rebuild the Label/NumLabel/NumUnit maps from the flat labelX
		// list: strX != 0 marks a string label, otherwise a numeric one.
		labels := make(map[string][]string, len(s.labelX))
		numLabels := make(map[string][]int64, len(s.labelX))
		numUnits := make(map[string][]string, len(s.labelX))
		for _, l := range s.labelX {
			var key, value string
			key, err = getString(p.stringTable, &l.keyX, err)
			if l.strX != 0 {
				value, err = getString(p.stringTable, &l.strX, err)
				labels[key] = append(labels[key], value)
			} else if l.numX != 0 {
				numValues := numLabels[key]
				units := numUnits[key]
				if l.unitX != 0 {
					var unit string
					unit, err = getString(p.stringTable, &l.unitX, err)
					// Pad so units stay index-aligned with their values.
					units = padStringArray(units, len(numValues))
					numUnits[key] = append(units, unit)
				}
				numLabels[key] = append(numLabels[key], l.numX)
			}
		}
		if len(labels) > 0 {
			s.Label = labels
		}
		if len(numLabels) > 0 {
			s.NumLabel = numLabels
			for key, units := range numUnits {
				if len(units) > 0 {
					numUnits[key] = padStringArray(units, len(numLabels[key]))
				}
			}
			s.NumUnit = numUnits
		}
		s.Location = make([]*Location, len(s.locationIDX))
		for i, lid := range s.locationIDX {
			if lid < uint64(len(locationIds)) {
				s.Location[i] = locationIds[lid]
			} else {
				s.Location[i] = locations[lid]
			}
		}
		s.locationIDX = nil
	}

	p.DropFrames, err = getString(p.stringTable, &p.dropFramesX, err)
	p.KeepFrames, err = getString(p.stringTable, &p.keepFramesX, err)

	// Guarantee a non-nil PeriodType so downstream code need not check.
	if pt := p.PeriodType; pt == nil {
		p.PeriodType = &ValueType{}
	}

	if pt := p.PeriodType; pt != nil {
		pt.Type, err = getString(p.stringTable, &pt.typeX, err)
		pt.Unit, err = getString(p.stringTable, &pt.unitX, err)
	}

	for _, i := range p.commentX {
		var c string
		c, err = getString(p.stringTable, &i, err)
		p.Comments = append(p.Comments, c)
	}

	p.commentX = nil
	p.DefaultSampleType, err = getString(p.stringTable, &p.defaultSampleTypeX, err)
	p.stringTable = nil
	return err
}
+
// padStringArray extends arr with empty strings until its length is l.
// When arr already has at least l elements it is returned unchanged.
func padStringArray(arr []string, l int) []string {
	missing := l - len(arr)
	if missing <= 0 {
		return arr
	}
	return append(arr, make([]string, missing)...)
}
+
// decoder returns the field-decoder table for a serialized ValueType.
func (p *ValueType) decoder() []decoder {
	return valueTypeDecoder
}

// encode serializes p following the ValueType message field numbering.
func (p *ValueType) encode(b *buffer) {
	encodeInt64Opt(b, 1, p.typeX)
	encodeInt64Opt(b, 2, p.unitX)
}

// valueTypeDecoder maps ValueType field numbers to decode handlers.
var valueTypeDecoder = []decoder{
	nil, // 0
	// optional int64 type = 1
	func(b *buffer, m message) error { return decodeInt64(b, &m.(*ValueType).typeX) },
	// optional int64 unit = 2
	func(b *buffer, m message) error { return decodeInt64(b, &m.(*ValueType).unitX) },
}
+
// decoder returns the field-decoder table for a serialized Sample.
func (p *Sample) decoder() []decoder {
	return sampleDecoder
}

// encode serializes p following the Sample message field numbering.
func (p *Sample) encode(b *buffer) {
	encodeUint64s(b, 1, p.locationIDX)
	encodeInt64s(b, 2, p.Value)
	for _, x := range p.labelX {
		encodeMessage(b, 3, x)
	}
}

// sampleDecoder maps Sample field numbers to decode handlers.
var sampleDecoder = []decoder{
	nil, // 0
	// repeated uint64 location = 1
	func(b *buffer, m message) error { return decodeUint64s(b, &m.(*Sample).locationIDX) },
	// repeated int64 value = 2
	func(b *buffer, m message) error { return decodeInt64s(b, &m.(*Sample).Value) },
	// repeated Label label = 3
	func(b *buffer, m message) error {
		s := m.(*Sample)
		n := len(s.labelX)
		// Append a zero label first, then decode directly into the new
		// slot, avoiding a separate allocation per label.
		s.labelX = append(s.labelX, label{})
		return decodeMessage(b, &s.labelX[n])
	},
}
+
// decoder returns the field-decoder table for a serialized Label.
func (p label) decoder() []decoder {
	return labelDecoder
}

// encode serializes p following the Label message field numbering.
func (p label) encode(b *buffer) {
	encodeInt64Opt(b, 1, p.keyX)
	encodeInt64Opt(b, 2, p.strX)
	encodeInt64Opt(b, 3, p.numX)
	encodeInt64Opt(b, 4, p.unitX)
}

// labelDecoder maps Label field numbers to decode handlers.
var labelDecoder = []decoder{
	nil, // 0
	// optional int64 key = 1
	func(b *buffer, m message) error { return decodeInt64(b, &m.(*label).keyX) },
	// optional int64 str = 2
	func(b *buffer, m message) error { return decodeInt64(b, &m.(*label).strX) },
	// optional int64 num = 3
	func(b *buffer, m message) error { return decodeInt64(b, &m.(*label).numX) },
	// optional int64 num_unit = 4
	func(b *buffer, m message) error { return decodeInt64(b, &m.(*label).unitX) },
}
+
// decoder returns the field-decoder table for a serialized Mapping.
func (p *Mapping) decoder() []decoder {
	return mappingDecoder
}

// encode serializes p following the Mapping message field numbering.
func (p *Mapping) encode(b *buffer) {
	encodeUint64Opt(b, 1, p.ID)
	encodeUint64Opt(b, 2, p.Start)
	encodeUint64Opt(b, 3, p.Limit)
	encodeUint64Opt(b, 4, p.Offset)
	encodeInt64Opt(b, 5, p.fileX)
	encodeInt64Opt(b, 6, p.buildIDX)
	encodeBoolOpt(b, 7, p.HasFunctions)
	encodeBoolOpt(b, 8, p.HasFilenames)
	encodeBoolOpt(b, 9, p.HasLineNumbers)
	encodeBoolOpt(b, 10, p.HasInlineFrames)
}

// mappingDecoder maps Mapping field numbers to decode handlers.
var mappingDecoder = []decoder{
	nil, // 0
	func(b *buffer, m message) error { return decodeUint64(b, &m.(*Mapping).ID) },            // optional uint64 id = 1
	func(b *buffer, m message) error { return decodeUint64(b, &m.(*Mapping).Start) },         // optional uint64 memory_start = 2
	func(b *buffer, m message) error { return decodeUint64(b, &m.(*Mapping).Limit) },         // optional uint64 memory_limit = 3
	func(b *buffer, m message) error { return decodeUint64(b, &m.(*Mapping).Offset) },        // optional uint64 file_offset = 4
	func(b *buffer, m message) error { return decodeInt64(b, &m.(*Mapping).fileX) },          // optional int64 filename = 5
	func(b *buffer, m message) error { return decodeInt64(b, &m.(*Mapping).buildIDX) },       // optional int64 build_id = 6
	func(b *buffer, m message) error { return decodeBool(b, &m.(*Mapping).HasFunctions) },    // optional bool has_functions = 7
	func(b *buffer, m message) error { return decodeBool(b, &m.(*Mapping).HasFilenames) },    // optional bool has_filenames = 8
	func(b *buffer, m message) error { return decodeBool(b, &m.(*Mapping).HasLineNumbers) },  // optional bool has_line_numbers = 9
	func(b *buffer, m message) error { return decodeBool(b, &m.(*Mapping).HasInlineFrames) }, // optional bool has_inline_frames = 10
}
+
// decoder returns the field-decoder table for a serialized Location.
func (p *Location) decoder() []decoder {
	return locationDecoder
}

// encode serializes p following the Location message field numbering.
func (p *Location) encode(b *buffer) {
	encodeUint64Opt(b, 1, p.ID)
	encodeUint64Opt(b, 2, p.mappingIDX)
	encodeUint64Opt(b, 3, p.Address)
	for i := range p.Line {
		encodeMessage(b, 4, &p.Line[i])
	}
	encodeBoolOpt(b, 5, p.IsFolded)
}

// locationDecoder maps Location field numbers to decode handlers.
var locationDecoder = []decoder{
	nil, // 0
	func(b *buffer, m message) error { return decodeUint64(b, &m.(*Location).ID) },         // optional uint64 id = 1;
	func(b *buffer, m message) error { return decodeUint64(b, &m.(*Location).mappingIDX) }, // optional uint64 mapping_id = 2;
	func(b *buffer, m message) error { return decodeUint64(b, &m.(*Location).Address) },    // optional uint64 address = 3;
	func(b *buffer, m message) error { // repeated Line line = 4
		pp := m.(*Location)
		n := len(pp.Line)
		// Decode into a freshly appended slot to avoid a per-line allocation.
		pp.Line = append(pp.Line, Line{})
		return decodeMessage(b, &pp.Line[n])
	},
	func(b *buffer, m message) error { return decodeBool(b, &m.(*Location).IsFolded) }, // optional bool is_folded = 5;
}
+
// decoder returns the field-decoder table for a serialized Line.
func (p *Line) decoder() []decoder {
	return lineDecoder
}

// encode serializes p following the Line message field numbering.
func (p *Line) encode(b *buffer) {
	encodeUint64Opt(b, 1, p.functionIDX)
	encodeInt64Opt(b, 2, p.Line)
}

// lineDecoder maps Line field numbers to decode handlers.
var lineDecoder = []decoder{
	nil, // 0
	// optional uint64 function_id = 1
	func(b *buffer, m message) error { return decodeUint64(b, &m.(*Line).functionIDX) },
	// optional int64 line = 2
	func(b *buffer, m message) error { return decodeInt64(b, &m.(*Line).Line) },
}
+
// decoder returns the field-decoder table for a serialized Function.
func (p *Function) decoder() []decoder {
	return functionDecoder
}

// encode serializes p following the Function message field numbering.
func (p *Function) encode(b *buffer) {
	encodeUint64Opt(b, 1, p.ID)
	encodeInt64Opt(b, 2, p.nameX)
	encodeInt64Opt(b, 3, p.systemNameX)
	encodeInt64Opt(b, 4, p.filenameX)
	encodeInt64Opt(b, 5, p.StartLine)
}

// functionDecoder maps Function field numbers to decode handlers.
var functionDecoder = []decoder{
	nil, // 0
	// optional uint64 id = 1
	func(b *buffer, m message) error { return decodeUint64(b, &m.(*Function).ID) },
	// optional int64 function_name = 2
	func(b *buffer, m message) error { return decodeInt64(b, &m.(*Function).nameX) },
	// optional int64 function_system_name = 3
	func(b *buffer, m message) error { return decodeInt64(b, &m.(*Function).systemNameX) },
	// optional int64 filename = 4
	func(b *buffer, m message) error { return decodeInt64(b, &m.(*Function).filenameX) },
	// optional int64 start_line = 5
	func(b *buffer, m message) error { return decodeInt64(b, &m.(*Function).StartLine) },
}
+
// addString interns s in the string-index map and returns its index.
// The first string added (by convention "") gets index 0.
func addString(strings map[string]int, s string) int64 {
	if i, ok := strings[s]; ok {
		return int64(i)
	}
	next := len(strings)
	strings[s] = next
	return int64(next)
}
+
+func getString(strings []string, strng *int64, err error) (string, error) {
+ if err != nil {
+ return "", err
+ }
+ s := int(*strng)
+ if s < 0 || s >= len(strings) {
+ return "", errMalformed
+ }
+ *strng = 0
+ return strings[s], nil
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/profile/filter.go b/src/cmd/vendor/github.com/google/pprof/profile/filter.go
new file mode 100644
index 0000000..ea8e66c
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/profile/filter.go
@@ -0,0 +1,270 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package profile
+
+// Implements methods to filter samples from profiles.
+
+import "regexp"
+
// FilterSamplesByName filters the samples in a profile and only keeps
// samples where at least one frame matches focus but none match ignore.
// It reports whether the focus, ignore, hide and show regexps each
// matched at least one location.
func (p *Profile) FilterSamplesByName(focus, ignore, hide, show *regexp.Regexp) (fm, im, hm, hnm bool) {
	// focusOrIgnore records, per location ID, true for focused and
	// false for ignored locations; unmatched locations are absent.
	focusOrIgnore := make(map[uint64]bool)
	hidden := make(map[uint64]bool)
	for _, l := range p.Location {
		if ignore != nil && l.matchesName(ignore) {
			im = true
			focusOrIgnore[l.ID] = false
		} else if focus == nil || l.matchesName(focus) {
			fm = true
			focusOrIgnore[l.ID] = true
		}

		// hide/show prune lines within a location; a location with no
		// lines left is dropped from samples below.
		if hide != nil && l.matchesName(hide) {
			hm = true
			l.Line = l.unmatchedLines(hide)
			if len(l.Line) == 0 {
				hidden[l.ID] = true
			}
		}
		if show != nil {
			l.Line = l.matchedLines(show)
			if len(l.Line) == 0 {
				hidden[l.ID] = true
			} else {
				hnm = true
			}
		}
	}

	s := make([]*Sample, 0, len(p.Sample))
	for _, sample := range p.Sample {
		if focusedAndNotIgnored(sample.Location, focusOrIgnore) {
			if len(hidden) > 0 {
				var locs []*Location
				for _, loc := range sample.Location {
					if !hidden[loc.ID] {
						locs = append(locs, loc)
					}
				}
				if len(locs) == 0 {
					// Remove sample with no locations (by not adding it to s).
					continue
				}
				sample.Location = locs
			}
			s = append(s, sample)
		}
	}
	p.Sample = s

	return
}
+
// ShowFrom drops all stack frames above the highest matching frame and returns
// whether a match was found. If showFrom is nil it returns false and does not
// modify the profile.
//
// Example: consider a sample with frames [A, B, C, B], where A is the root.
// ShowFrom(nil) returns false and has frames [A, B, C, B].
// ShowFrom(A) returns true and has frames [A, B, C, B].
// ShowFrom(B) returns true and has frames [B, C, B].
// ShowFrom(C) returns true and has frames [C, B].
// ShowFrom(D) returns false and drops the sample because no frames remain.
func (p *Profile) ShowFrom(showFrom *regexp.Regexp) (matched bool) {
	if showFrom == nil {
		return false
	}
	// showFromLocs stores location IDs that matched ShowFrom.
	showFromLocs := make(map[uint64]bool)
	// Apply to locations.
	for _, loc := range p.Location {
		if filterShowFromLocation(loc, showFrom) {
			showFromLocs[loc.ID] = true
			matched = true
		}
	}
	// For all samples, strip locations after the highest matching one.
	// Samples whose stacks contain no matching location are dropped.
	s := make([]*Sample, 0, len(p.Sample))
	for _, sample := range p.Sample {
		// Locations are leaf-first, so scan from the end (the root).
		for i := len(sample.Location) - 1; i >= 0; i-- {
			if showFromLocs[sample.Location[i].ID] {
				sample.Location = sample.Location[:i+1]
				s = append(s, sample)
				break
			}
		}
	}
	p.Sample = s
	return matched
}
+
+// filterShowFromLocation tests a showFrom regex against a location, removes
+// lines after the last match and returns whether a match was found. If the
+// mapping is matched, then all lines are kept.
+func filterShowFromLocation(loc *Location, showFrom *regexp.Regexp) bool {
+ if m := loc.Mapping; m != nil && showFrom.MatchString(m.File) {
+ return true
+ }
+ if i := loc.lastMatchedLineIndex(showFrom); i >= 0 {
+ loc.Line = loc.Line[:i+1]
+ return true
+ }
+ return false
+}
+
+// lastMatchedLineIndex returns the index of the last line that matches a regex,
+// or -1 if no match is found.
+func (loc *Location) lastMatchedLineIndex(re *regexp.Regexp) int {
+ for i := len(loc.Line) - 1; i >= 0; i-- {
+ if fn := loc.Line[i].Function; fn != nil {
+ if re.MatchString(fn.Name) || re.MatchString(fn.Filename) {
+ return i
+ }
+ }
+ }
+ return -1
+}
+
+// FilterTagsByName filters the tags in a profile and only keeps
+// tags that match show and not hide.
+func (p *Profile) FilterTagsByName(show, hide *regexp.Regexp) (sm, hm bool) {
+ matchRemove := func(name string) bool {
+ matchShow := show == nil || show.MatchString(name)
+ matchHide := hide != nil && hide.MatchString(name)
+
+ if matchShow {
+ sm = true
+ }
+ if matchHide {
+ hm = true
+ }
+ return !matchShow || matchHide
+ }
+ for _, s := range p.Sample {
+ for lab := range s.Label {
+ if matchRemove(lab) {
+ delete(s.Label, lab)
+ }
+ }
+ for lab := range s.NumLabel {
+ if matchRemove(lab) {
+ delete(s.NumLabel, lab)
+ }
+ }
+ }
+ return
+}
+
+// matchesName returns whether the location matches the regular
+// expression. It checks any available function names, file names, and
+// mapping object filename.
+func (loc *Location) matchesName(re *regexp.Regexp) bool {
+ for _, ln := range loc.Line {
+ if fn := ln.Function; fn != nil {
+ if re.MatchString(fn.Name) || re.MatchString(fn.Filename) {
+ return true
+ }
+ }
+ }
+ if m := loc.Mapping; m != nil && re.MatchString(m.File) {
+ return true
+ }
+ return false
+}
+
+// unmatchedLines returns the lines in the location that do not match
+// the regular expression.
+func (loc *Location) unmatchedLines(re *regexp.Regexp) []Line {
+ if m := loc.Mapping; m != nil && re.MatchString(m.File) {
+ return nil
+ }
+ var lines []Line
+ for _, ln := range loc.Line {
+ if fn := ln.Function; fn != nil {
+ if re.MatchString(fn.Name) || re.MatchString(fn.Filename) {
+ continue
+ }
+ }
+ lines = append(lines, ln)
+ }
+ return lines
+}
+
+// matchedLines returns the lines in the location that match
+// the regular expression.
+func (loc *Location) matchedLines(re *regexp.Regexp) []Line {
+ if m := loc.Mapping; m != nil && re.MatchString(m.File) {
+ return loc.Line
+ }
+ var lines []Line
+ for _, ln := range loc.Line {
+ if fn := ln.Function; fn != nil {
+ if !re.MatchString(fn.Name) && !re.MatchString(fn.Filename) {
+ continue
+ }
+ }
+ lines = append(lines, ln)
+ }
+ return lines
+}
+
+// focusedAndNotIgnored looks up a slice of ids against a map of
+// focused/ignored locations. The map only contains locations that are
+// explicitly focused or ignored. Returns whether there is at least
+// one focused location but no ignored locations.
+func focusedAndNotIgnored(locs []*Location, m map[uint64]bool) bool {
+ var f bool
+ for _, loc := range locs {
+ if focus, focusOrIgnore := m[loc.ID]; focusOrIgnore {
+ if focus {
+ // Found focused location. Must keep searching in case there
+ // is an ignored one as well.
+ f = true
+ } else {
+ // Found ignored location. Can return false right away.
+ return false
+ }
+ }
+ }
+ return f
+}
+
// TagMatch is a predicate over samples, used by FilterSamplesByTag to
// select which samples to keep.
type TagMatch func(s *Sample) bool
+
+// FilterSamplesByTag removes all samples from the profile, except
+// those that match focus and do not match the ignore regular
+// expression.
+func (p *Profile) FilterSamplesByTag(focus, ignore TagMatch) (fm, im bool) {
+ samples := make([]*Sample, 0, len(p.Sample))
+ for _, s := range p.Sample {
+ focused, ignored := true, false
+ if focus != nil {
+ focused = focus(s)
+ }
+ if ignore != nil {
+ ignored = ignore(s)
+ }
+ fm = fm || focused
+ im = im || ignored
+ if focused && !ignored {
+ samples = append(samples, s)
+ }
+ }
+ p.Sample = samples
+ return
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/profile/index.go b/src/cmd/vendor/github.com/google/pprof/profile/index.go
new file mode 100644
index 0000000..bef1d60
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/profile/index.go
@@ -0,0 +1,64 @@
+// Copyright 2016 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package profile
+
+import (
+ "fmt"
+ "strconv"
+ "strings"
+)
+
// SampleIndexByName returns the appropriate index for a value of sample index.
// If numeric, it returns the number, otherwise it looks up the text in the
// profile sample types. An empty string selects the profile's default
// sample type, falling back to the last sample value.
func (p *Profile) SampleIndexByName(sampleIndex string) (int, error) {
	if sampleIndex == "" {
		if dst := p.DefaultSampleType; dst != "" {
			for i, t := range sampleTypes(p) {
				if t == dst {
					return i, nil
				}
			}
		}
		// By default select the last sample value.
		// NOTE(review): when p.SampleType is empty this returns -1 with
		// a nil error — callers appear to assume at least one sample
		// type is present; confirm against call sites.
		return len(p.SampleType) - 1, nil
	}
	if i, err := strconv.Atoi(sampleIndex); err == nil {
		if i < 0 || i >= len(p.SampleType) {
			return 0, fmt.Errorf("sample_index %s is outside the range [0..%d]", sampleIndex, len(p.SampleType)-1)
		}
		return i, nil
	}

	// Remove the inuse_ prefix to support legacy pprof options
	// "inuse_space" and "inuse_objects" for profiles containing types
	// "space" and "objects".
	noInuse := strings.TrimPrefix(sampleIndex, "inuse_")
	for i, t := range p.SampleType {
		if t.Type == sampleIndex || t.Type == noInuse {
			return i, nil
		}
	}

	return 0, fmt.Errorf("sample_index %q must be one of: %v", sampleIndex, sampleTypes(p))
}
+
+func sampleTypes(p *Profile) []string {
+ types := make([]string, len(p.SampleType))
+ for i, t := range p.SampleType {
+ types[i] = t.Type
+ }
+ return types
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/profile/legacy_java_profile.go b/src/cmd/vendor/github.com/google/pprof/profile/legacy_java_profile.go
new file mode 100644
index 0000000..91f45e5
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/profile/legacy_java_profile.go
@@ -0,0 +1,315 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// This file implements parsers to convert java legacy profiles into
+// the profile.proto format.
+
+package profile
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "path/filepath"
+ "regexp"
+ "strconv"
+ "strings"
+)
+
// Regular expressions recognizing the pieces of a java legacy profile.
var (
	// attributeRx matches a "name = value" header attribute line.
	attributeRx = regexp.MustCompile(`([\w ]+)=([\w ]+)`)
	// javaSampleRx matches a sample line: two counts, "@", then a list
	// of hex addresses.
	javaSampleRx = regexp.MustCompile(` *(\d+) +(\d+) +@ +([ x0-9a-f]*)`)
	// javaLocationRx matches a location line: a hex address followed by
	// its textual description.
	javaLocationRx = regexp.MustCompile(`^\s*0x([[:xdigit:]]+)\s+(.*)\s*$`)
	// javaLocationFileLineRx matches a "function (file:line)" description.
	javaLocationFileLineRx = regexp.MustCompile(`^(.*)\s+\((.+):(-?[[:digit:]]+)\)$`)
	// javaLocationPathRx matches a "function (path)" description.
	javaLocationPathRx = regexp.MustCompile(`^(.*)\s+\((.*)\)$`)
)
+
// javaCPUProfile returns a new Profile from profilez data.
// b is the profile bytes after the header, period is the profiling
// period, and parse is a function to parse 8-byte chunks from the
// profile in its native endianness.
func javaCPUProfile(b []byte, period int64, parse func(b []byte) (uint64, []byte)) (*Profile, error) {
	p := &Profile{
		// The header states the period in microseconds; store nanoseconds.
		Period:     period * 1000,
		PeriodType: &ValueType{Type: "cpu", Unit: "nanoseconds"},
		SampleType: []*ValueType{{Type: "samples", Unit: "count"}, {Type: "cpu", Unit: "nanoseconds"}},
	}
	var err error
	var locs map[uint64]*Location
	// Binary samples come first; the remainder of the buffer holds the
	// textual location descriptions.
	if b, locs, err = parseCPUSamples(b, parse, false, p); err != nil {
		return nil, err
	}

	if err = parseJavaLocations(b, locs, p); err != nil {
		return nil, err
	}

	// Strip out addresses for better merge.
	if err = p.Aggregate(true, true, true, true, false); err != nil {
		return nil, err
	}

	return p, nil
}
+
// parseJavaProfile returns a new profile from heapz or contentionz
// data. b is the profile bytes after the header.
func parseJavaProfile(b []byte) (*Profile, error) {
	// Split off the first line, which identifies the profile type.
	h := bytes.SplitAfterN(b, []byte("\n"), 2)
	if len(h) < 2 {
		return nil, errUnrecognized
	}

	p := &Profile{
		PeriodType: &ValueType{},
	}
	header := string(bytes.TrimSpace(h[0]))

	var err error
	var pType string
	switch header {
	case "--- heapz 1 ---":
		pType = "heap"
	case "--- contentionz 1 ---":
		pType = "contention"
	default:
		return nil, errUnrecognized
	}

	// The body is: header attributes, then samples, then locations.
	if b, err = parseJavaHeader(pType, h[1], p); err != nil {
		return nil, err
	}
	var locs map[uint64]*Location
	if b, locs, err = parseJavaSamples(pType, b, p); err != nil {
		return nil, err
	}
	if err = parseJavaLocations(b, locs, p); err != nil {
		return nil, err
	}

	// Strip out addresses for better merge.
	if err = p.Aggregate(true, true, true, true, false); err != nil {
		return nil, err
	}

	return p, nil
}
+
// parseJavaHeader parses the attribute section on a java profile and
// populates a profile. Returns the remainder of the buffer after all
// attributes.
func parseJavaHeader(pType string, b []byte, p *Profile) ([]byte, error) {
	nextNewLine := bytes.IndexByte(b, byte('\n'))
	for nextNewLine != -1 {
		line := string(bytes.TrimSpace(b[0:nextNewLine]))
		if line != "" {
			h := attributeRx.FindStringSubmatch(line)
			if h == nil {
				// Not a valid attribute, exit.
				return b, nil
			}

			attribute, value := strings.TrimSpace(h[1]), strings.TrimSpace(h[2])
			var err error
			// Attributes are interpreted in the context of the profile
			// type ("heap", "cpu" or "contention").
			switch pType + "/" + attribute {
			case "heap/format", "cpu/format", "contention/format":
				if value != "java" {
					return nil, errUnrecognized
				}
			case "heap/resolution":
				p.SampleType = []*ValueType{
					{Type: "inuse_objects", Unit: "count"},
					{Type: "inuse_space", Unit: value},
				}
			case "contention/resolution":
				p.SampleType = []*ValueType{
					{Type: "contentions", Unit: "count"},
					{Type: "delay", Unit: value},
				}
			case "contention/sampling period":
				p.PeriodType = &ValueType{
					Type: "contentions", Unit: "count",
				}
				// Base 0: accepts decimal, hex (0x...) or octal (0...) input.
				if p.Period, err = strconv.ParseInt(value, 0, 64); err != nil {
					return nil, fmt.Errorf("failed to parse attribute %s: %v", line, err)
				}
			case "contention/ms since reset":
				millis, err := strconv.ParseInt(value, 0, 64)
				if err != nil {
					return nil, fmt.Errorf("failed to parse attribute %s: %v", line, err)
				}
				p.DurationNanos = millis * 1000 * 1000
			default:
				return nil, errUnrecognized
			}
		}
		// Grab next line.
		b = b[nextNewLine+1:]
		nextNewLine = bytes.IndexByte(b, byte('\n'))
	}
	return b, nil
}
+
// parseJavaSamples parses the samples from a java profile and
// populates the Samples in a profile. Returns the remainder of the
// buffer after the samples.
func parseJavaSamples(pType string, b []byte, p *Profile) ([]byte, map[uint64]*Location, error) {
	nextNewLine := bytes.IndexByte(b, byte('\n'))
	locs := make(map[uint64]*Location)
	for nextNewLine != -1 {
		line := string(bytes.TrimSpace(b[0:nextNewLine]))
		if line != "" {
			sample := javaSampleRx.FindStringSubmatch(line)
			if sample == nil {
				// Not a valid sample, exit.
				return b, locs, nil
			}

			// Java profiles have data/fields inverted compared to other
			// profile types.
			var err error
			value1, value2, value3 := sample[2], sample[1], sample[3]
			addrs, err := parseHexAddresses(value3)
			if err != nil {
				return nil, nil, fmt.Errorf("malformed sample: %s: %v", line, err)
			}

			var sloc []*Location
			for _, addr := range addrs {
				// Reuse the Location for an address seen earlier.
				loc := locs[addr]
				if locs[addr] == nil {
					loc = &Location{
						Address: addr,
					}
					p.Location = append(p.Location, loc)
					locs[addr] = loc
				}
				sloc = append(sloc, loc)
			}
			s := &Sample{
				Value:    make([]int64, 2),
				Location: sloc,
			}

			if s.Value[0], err = strconv.ParseInt(value1, 0, 64); err != nil {
				return nil, nil, fmt.Errorf("parsing sample %s: %v", line, err)
			}
			if s.Value[1], err = strconv.ParseInt(value2, 0, 64); err != nil {
				return nil, nil, fmt.Errorf("parsing sample %s: %v", line, err)
			}

			switch pType {
			case "heap":
				const javaHeapzSamplingRate = 524288 // 512K
				// After the inversion above, s.Value[0] is the second
				// number on the line; it divides s.Value[1] below, so it
				// must be non-zero.
				if s.Value[0] == 0 {
					return nil, nil, fmt.Errorf("parsing sample %s: second value must be non-zero", line)
				}
				s.NumLabel = map[string][]int64{"bytes": {s.Value[1] / s.Value[0]}}
				s.Value[0], s.Value[1] = scaleHeapSample(s.Value[0], s.Value[1], javaHeapzSamplingRate)
			case "contention":
				// Unsample by multiplying by the sampling period, if known.
				if period := p.Period; period != 0 {
					s.Value[0] = s.Value[0] * p.Period
					s.Value[1] = s.Value[1] * p.Period
				}
			}
			p.Sample = append(p.Sample, s)
		}
		// Grab next line.
		b = b[nextNewLine+1:]
		nextNewLine = bytes.IndexByte(b, byte('\n'))
	}
	return b, locs, nil
}
+
// parseJavaLocations parses the location information in a java
// profile and populates the Locations in a profile. It matches each
// location line against the addresses previously collected in locs.
func parseJavaLocations(b []byte, locs map[uint64]*Location, p *Profile) error {
	r := bytes.NewBuffer(b)
	fns := make(map[string]*Function)
	for {
		line, err := r.ReadString('\n')
		if err != nil {
			if err != io.EOF {
				return err
			}
			// At EOF, still process a final unterminated line; only
			// stop once nothing remains.
			if line == "" {
				break
			}
		}

		if line = strings.TrimSpace(line); line == "" {
			continue
		}

		jloc := javaLocationRx.FindStringSubmatch(line)
		if len(jloc) != 3 {
			continue
		}
		addr, err := strconv.ParseUint(jloc[1], 16, 64)
		if err != nil {
			return fmt.Errorf("parsing sample %s: %v", line, err)
		}
		loc := locs[addr]
		if loc == nil {
			// Unused/unseen
			continue
		}
		var lineFunc, lineFile string
		var lineNo int64

		if fileLine := javaLocationFileLineRx.FindStringSubmatch(jloc[2]); len(fileLine) == 4 {
			// Found a line of the form: "function (file:line)"
			lineFunc, lineFile = fileLine[1], fileLine[2]
			if n, err := strconv.ParseInt(fileLine[3], 10, 64); err == nil && n > 0 {
				lineNo = n
			}
		} else if filePath := javaLocationPathRx.FindStringSubmatch(jloc[2]); len(filePath) == 3 {
			// If there's not a file:line, it's a shared library path.
			// The path isn't interesting, so just give the .so.
			lineFunc, lineFile = filePath[1], filepath.Base(filePath[2])
		} else if strings.Contains(jloc[2], "generated stub/JIT") {
			lineFunc = "STUB"
		} else {
			// Treat whole line as the function name. This is used by the
			// java agent for internal states such as "GC" or "VM".
			lineFunc = jloc[2]
		}
		// Functions are deduplicated by name.
		fn := fns[lineFunc]

		if fn == nil {
			fn = &Function{
				Name:       lineFunc,
				SystemName: lineFunc,
				Filename:   lineFile,
			}
			fns[lineFunc] = fn
			p.Function = append(p.Function, fn)
		}
		loc.Line = []Line{
			{
				Function: fn,
				Line:     lineNo,
			},
		}
		// Drop the raw address now that the location is symbolized.
		loc.Address = 0
	}

	// Renumber IDs now that the sets of locations, functions and
	// mappings are final.
	p.remapLocationIDs()
	p.remapFunctionIDs()
	p.remapMappingIDs()

	return nil
}
diff --git a/src/cmd/vendor/github.com/google/pprof/profile/legacy_profile.go b/src/cmd/vendor/github.com/google/pprof/profile/legacy_profile.go
new file mode 100644
index 0000000..0c8f3bb
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/profile/legacy_profile.go
@@ -0,0 +1,1225 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// This file implements parsers to convert legacy profiles into the
+// profile.proto format.
+
+package profile
+
+import (
+ "bufio"
+ "bytes"
+ "fmt"
+ "io"
+ "math"
+ "regexp"
+ "strconv"
+ "strings"
+)
+
// Regular expressions recognizing the pieces of the various legacy
// text profile formats (Go count, heapz, contentionz, threadz, maps).
var (
	// countStartRE matches a Go count profile header, e.g.
	// "goroutine profile: total 42".
	countStartRE = regexp.MustCompile(`\A(\S+) profile: total \d+\z`)
	// countRE matches one count sample: a count, "@", and a stack of
	// hex addresses.
	countRE = regexp.MustCompile(`\A(\d+) @(( 0x[0-9a-f]+)+)\z`)

	heapHeaderRE = regexp.MustCompile(`heap profile: *(\d+): *(\d+) *\[ *(\d+): *(\d+) *\] *@ *(heap[_a-z0-9]*)/?(\d*)`)
	heapSampleRE = regexp.MustCompile(`(-?\d+): *(-?\d+) *\[ *(\d+): *(\d+) *] @([ x0-9a-f]*)`)

	contentionSampleRE = regexp.MustCompile(`(\d+) *(\d+) @([ x0-9a-f]*)`)

	hexNumberRE = regexp.MustCompile(`0x[0-9a-f]+`)

	growthHeaderRE = regexp.MustCompile(`heap profile: *(\d+): *(\d+) *\[ *(\d+): *(\d+) *\] @ growthz?`)

	fragmentationHeaderRE = regexp.MustCompile(`heap profile: *(\d+): *(\d+) *\[ *(\d+): *(\d+) *\] @ fragmentationz?`)

	threadzStartRE = regexp.MustCompile(`--- threadz \d+ ---`)
	threadStartRE  = regexp.MustCompile(`--- Thread ([[:xdigit:]]+) \(name: (.*)/(\d+)\) stack: ---`)

	// Regular expressions to parse process mappings. Support the format used by Linux /proc/.../maps and other tools.
	// Recommended format:
	// Start End object file name offset(optional) linker build id
	// 0x40000-0x80000 /path/to/binary (@FF00) abc123456
	spaceDigits = `\s+[[:digit:]]+`
	hexPair     = `\s+[[:xdigit:]]+:[[:xdigit:]]+`
	oSpace      = `\s*`
	// Capturing expressions.
	cHex           = `(?:0x)?([[:xdigit:]]+)`
	cHexRange      = `\s*` + cHex + `[\s-]?` + oSpace + cHex + `:?`
	cSpaceString   = `(?:\s+(\S+))?`
	cSpaceHex      = `(?:\s+([[:xdigit:]]+))?`
	cSpaceAtOffset = `(?:\s+\(@([[:xdigit:]]+)\))?`
	cPerm          = `(?:\s+([-rwxp]+))?`

	procMapsRE  = regexp.MustCompile(`^` + cHexRange + cPerm + cSpaceHex + hexPair + spaceDigits + cSpaceString)
	briefMapsRE = regexp.MustCompile(`^` + cHexRange + cPerm + cSpaceString + cSpaceAtOffset + cSpaceHex)

	// Regular expression to parse log data, of the form:
	// ... file:line] msg...
	logInfoRE = regexp.MustCompile(`^[^\[\]]+:[0-9]+]\s`)
)
+
// isSpaceOrComment reports whether line is blank or is a "#" comment
// once surrounding whitespace is removed.
func isSpaceOrComment(line string) bool {
	t := strings.TrimSpace(line)
	return t == "" || strings.HasPrefix(t, "#")
}
+
// parseGoCount parses a Go count profile (e.g., threadcreate or
// goroutine) and returns a new Profile.
func parseGoCount(b []byte) (*Profile, error) {
	s := bufio.NewScanner(bytes.NewBuffer(b))
	// Skip comments at the beginning of the file.
	for s.Scan() && isSpaceOrComment(s.Text()) {
	}
	if err := s.Err(); err != nil {
		return nil, err
	}
	m := countStartRE.FindStringSubmatch(s.Text())
	if m == nil {
		return nil, errUnrecognized
	}
	// The profile type name doubles as the sample/period type.
	profileType := m[1]
	p := &Profile{
		PeriodType: &ValueType{Type: profileType, Unit: "count"},
		Period:     1,
		SampleType: []*ValueType{{Type: profileType, Unit: "count"}},
	}
	locations := make(map[uint64]*Location)
	for s.Scan() {
		line := s.Text()
		if isSpaceOrComment(line) {
			continue
		}
		if strings.HasPrefix(line, "---") {
			// A "---" line marks the start of a trailing section.
			break
		}
		m := countRE.FindStringSubmatch(line)
		if m == nil {
			return nil, errMalformed
		}
		n, err := strconv.ParseInt(m[1], 0, 64)
		if err != nil {
			return nil, errMalformed
		}
		fields := strings.Fields(m[2])
		locs := make([]*Location, 0, len(fields))
		for _, stk := range fields {
			addr, err := strconv.ParseUint(stk, 0, 64)
			if err != nil {
				return nil, errMalformed
			}
			// Adjust all frames by -1 to land on top of the call instruction.
			addr--
			loc := locations[addr]
			if loc == nil {
				loc = &Location{
					Address: addr,
				}
				locations[addr] = loc
				p.Location = append(p.Location, loc)
			}
			locs = append(locs, loc)
		}
		p.Sample = append(p.Sample, &Sample{
			Location: locs,
			Value:    []int64{n},
		})
	}
	if err := s.Err(); err != nil {
		return nil, err
	}

	// Hand any remaining input to the shared trailing-section parser.
	if err := parseAdditionalSections(s, p); err != nil {
		return nil, err
	}
	return p, nil
}
+
+// remapLocationIDs ensures there is a location for each address
+// referenced by a sample, and remaps the samples to point to the new
+// location ids.
+func (p *Profile) remapLocationIDs() {
+ seen := make(map[*Location]bool, len(p.Location))
+ var locs []*Location
+
+ for _, s := range p.Sample {
+ for _, l := range s.Location {
+ if seen[l] {
+ continue
+ }
+ l.ID = uint64(len(locs) + 1)
+ locs = append(locs, l)
+ seen[l] = true
+ }
+ }
+ p.Location = locs
+}
+
+func (p *Profile) remapFunctionIDs() {
+ seen := make(map[*Function]bool, len(p.Function))
+ var fns []*Function
+
+ for _, l := range p.Location {
+ for _, ln := range l.Line {
+ fn := ln.Function
+ if fn == nil || seen[fn] {
+ continue
+ }
+ fn.ID = uint64(len(fns) + 1)
+ fns = append(fns, fn)
+ seen[fn] = true
+ }
+ }
+ p.Function = fns
+}
+
// remapMappingIDs matches location addresses with existing mappings
// and updates them appropriately. This is O(N*M), if this ever shows
// up as a bottleneck, evaluate sorting the mappings and doing a
// binary search, which would make it O(N*log(M)).
func (p *Profile) remapMappingIDs() {
	// Some profile handlers will incorrectly set regions for the main
	// executable if its section is remapped. Fix them through heuristics.

	if len(p.Mapping) > 0 {
		// Remove the initial mapping if named '/anon_hugepage' and has a
		// consecutive adjacent mapping.
		if m := p.Mapping[0]; strings.HasPrefix(m.File, "/anon_hugepage") {
			if len(p.Mapping) > 1 && m.Limit == p.Mapping[1].Start {
				p.Mapping = p.Mapping[1:]
			}
		}
	}

	// Subtract the offset from the start of the main mapping if it
	// ends up at a recognizable start address.
	if len(p.Mapping) > 0 {
		// 0x400000 is the conventional load address checked for here.
		const expectedStart = 0x400000
		if m := p.Mapping[0]; m.Start-m.Offset == expectedStart {
			m.Start = expectedStart
			m.Offset = 0
		}
	}

	// Associate each location with an address to the corresponding
	// mapping. Create fake mapping if a suitable one isn't found.
	var fake *Mapping
nextLocation:
	for _, l := range p.Location {
		a := l.Address
		if l.Mapping != nil || a == 0 {
			continue
		}
		for _, m := range p.Mapping {
			if m.Start <= a && a < m.Limit {
				l.Mapping = m
				continue nextLocation
			}
		}
		// Work around legacy handlers failing to encode the first
		// part of mappings split into adjacent ranges.
		for _, m := range p.Mapping {
			if m.Offset != 0 && m.Start-m.Offset <= a && a < m.Start {
				m.Start -= m.Offset
				m.Offset = 0
				l.Mapping = m
				continue nextLocation
			}
		}
		// If there is still no mapping, create a fake one.
		// This is important for the Go legacy handler, which produced
		// no mappings.
		if fake == nil {
			fake = &Mapping{
				ID:    1,
				Limit: ^uint64(0),
			}
			p.Mapping = append(p.Mapping, fake)
		}
		l.Mapping = fake
	}

	// Reset all mapping IDs.
	for i, m := range p.Mapping {
		m.ID = uint64(i + 1)
	}
}
+
// cpuInts lists the candidate word decoders for a profilez CPU
// profile; parseCPU tries each in turn to detect the profile's word
// size and endianness.
var cpuInts = []func([]byte) (uint64, []byte){
	get32l,
	get32b,
	get64l,
	get64b,
}
+
// get32l decodes a little-endian 32-bit word from b, returning it
// widened to uint64 together with the remaining bytes, or (0, nil)
// if b is too short.
func get32l(b []byte) (uint64, []byte) {
	if len(b) < 4 {
		return 0, nil
	}
	var v uint64
	for i := 3; i >= 0; i-- {
		v = v<<8 | uint64(b[i])
	}
	return v, b[4:]
}
+
// get32b decodes a big-endian 32-bit word from b, returning it
// widened to uint64 together with the remaining bytes, or (0, nil)
// if b is too short.
func get32b(b []byte) (uint64, []byte) {
	if len(b) < 4 {
		return 0, nil
	}
	var v uint64
	for i := 0; i < 4; i++ {
		v = v<<8 | uint64(b[i])
	}
	return v, b[4:]
}
+
// get64l decodes a little-endian 64-bit word from b, returning it and
// the remaining bytes, or (0, nil) if b is too short.
func get64l(b []byte) (uint64, []byte) {
	if len(b) < 8 {
		return 0, nil
	}
	var v uint64
	for i := 7; i >= 0; i-- {
		v = v<<8 | uint64(b[i])
	}
	return v, b[8:]
}
+
// get64b decodes a big-endian 64-bit word from b, returning it and
// the remaining bytes, or (0, nil) if b is too short.
func get64b(b []byte) (uint64, []byte) {
	if len(b) < 8 {
		return 0, nil
	}
	var v uint64
	for i := 0; i < 8; i++ {
		v = v<<8 | uint64(b[i])
	}
	return v, b[8:]
}
+
// parseCPU parses a profilez legacy profile and returns a newly
// populated Profile.
//
// The general format for profilez samples is a sequence of words in
// binary format. The first words are a header with the following data:
// 1st word -- 0
// 2nd word -- 3
// 3rd word -- 0 if a c++ application, 1 if a java application.
// 4th word -- Sampling period (in microseconds).
// 5th word -- Padding.
func parseCPU(b []byte) (*Profile, error) {
	var parse func([]byte) (uint64, []byte)
	var n1, n2, n3, n4, n5 uint64
	// Try each known word-size/endianness decoder until one yields a
	// valid header.
	for _, parse = range cpuInts {
		var tmp []byte
		n1, tmp = parse(b)
		n2, tmp = parse(tmp)
		n3, tmp = parse(tmp)
		n4, tmp = parse(tmp)
		n5, tmp = parse(tmp)

		// tmp == nil means the buffer was too short for this word size.
		if tmp != nil && n1 == 0 && n2 == 3 && n3 == 0 && n4 > 0 && n5 == 0 {
			b = tmp
			return cpuProfile(b, int64(n4), parse)
		}
		if tmp != nil && n1 == 0 && n2 == 3 && n3 == 1 && n4 > 0 && n5 == 0 {
			b = tmp
			return javaCPUProfile(b, int64(n4), parse)
		}
	}
	return nil, errUnrecognized
}
+
// cpuProfile returns a new Profile from C++ profilez data.
// b is the profile bytes after the header, period is the profiling
// period, and parse is a function to parse 8-byte chunks from the
// profile in its native endianness.
func cpuProfile(b []byte, period int64, parse func(b []byte) (uint64, []byte)) (*Profile, error) {
	p := &Profile{
		// The header states the period in microseconds; store nanoseconds.
		Period:     period * 1000,
		PeriodType: &ValueType{Type: "cpu", Unit: "nanoseconds"},
		SampleType: []*ValueType{
			{Type: "samples", Unit: "count"},
			{Type: "cpu", Unit: "nanoseconds"},
		},
	}
	var err error
	if b, _, err = parseCPUSamples(b, parse, true, p); err != nil {
		return nil, err
	}

	// If *most* samples have the same second-to-the-bottom frame, it
	// strongly suggests that it is an uninteresting artifact of
	// measurement -- a stack frame pushed by the signal handler. The
	// bottom frame is always correct as it is picked up from the signal
	// structure, not the stack. Check if this is the case and if so,
	// remove.

	// Remove up to two frames.
	maxiter := 2
	// Allow one different sample for this many samples with the same
	// second-to-last frame.
	similarSamples := 32
	margin := len(p.Sample) / similarSamples

	for iter := 0; iter < maxiter; iter++ {
		// Count occurrences of each second-to-bottom address.
		addr1 := make(map[uint64]int)
		for _, s := range p.Sample {
			if len(s.Location) > 1 {
				a := s.Location[1].Address
				addr1[a] = addr1[a] + 1
			}
		}

		for id1, count := range addr1 {
			if count >= len(p.Sample)-margin {
				// Found uninteresting frame, strip it out from all samples
				for _, s := range p.Sample {
					if len(s.Location) > 1 && s.Location[1].Address == id1 {
						s.Location = append(s.Location[:1], s.Location[2:]...)
					}
				}
				break
			}
		}
	}

	// The remainder of the buffer holds the process memory map.
	if err := p.ParseMemoryMap(bytes.NewBuffer(b)); err != nil {
		return nil, err
	}

	cleanupDuplicateLocations(p)
	return p, nil
}
+
+func cleanupDuplicateLocations(p *Profile) {
+ // The profile handler may duplicate the leaf frame, because it gets
+ // its address both from stack unwinding and from the signal
+ // context. Detect this and delete the duplicate, which has been
+ // adjusted by -1. The leaf address should not be adjusted as it is
+ // not a call.
+ for _, s := range p.Sample {
+ if len(s.Location) > 1 && s.Location[0].Address == s.Location[1].Address+1 {
+ s.Location = append(s.Location[:1], s.Location[2:]...)
+ }
+ }
+}
+
// parseCPUSamples parses a collection of profilez samples from a
// profile.
//
// profilez samples are a repeated sequence of stack frames of the
// form:
// 1st word -- The number of times this stack was encountered.
// 2nd word -- The size of the stack (StackSize).
// 3rd word -- The first address on the stack.
// ...
// StackSize + 2 -- The last address on the stack
// The last stack trace is of the form:
// 1st word -- 0
// 2nd word -- 1
// 3rd word -- 0
//
// Addresses from stack traces may point to the next instruction after
// each call. Optionally adjust by -1 to land somewhere on the actual
// call (except for the leaf, which is not a call).
func parseCPUSamples(b []byte, parse func(b []byte) (uint64, []byte), adjust bool, p *Profile) ([]byte, map[uint64]*Location, error) {
	locs := make(map[uint64]*Location)
	for len(b) > 0 {
		var count, nstk uint64
		count, b = parse(b)
		nstk, b = parse(b)
		// Sanity-check the declared stack size against the remaining
		// buffer. NOTE(review): the bound divides by 4 even for 8-byte
		// words — presumably a deliberately loose check; confirm upstream.
		if b == nil || nstk > uint64(len(b)/4) {
			return nil, nil, errUnrecognized
		}
		var sloc []*Location
		addrs := make([]uint64, nstk)
		for i := 0; i < int(nstk); i++ {
			addrs[i], b = parse(b)
		}

		if count == 0 && nstk == 1 && addrs[0] == 0 {
			// End of data marker
			break
		}
		for i, addr := range addrs {
			// The leaf (i == 0) is never adjusted; it is not a call.
			if adjust && i > 0 {
				addr--
			}
			loc := locs[addr]
			if loc == nil {
				loc = &Location{
					Address: addr,
				}
				locs[addr] = loc
				p.Location = append(p.Location, loc)
			}
			sloc = append(sloc, loc)
		}
		p.Sample = append(p.Sample,
			&Sample{
				Value:    []int64{int64(count), int64(count) * p.Period},
				Location: sloc,
			})
	}
	// Reached the end without finding the EOD marker.
	return b, locs, nil
}
+
// parseHeap parses a heapz legacy or a growthz profile and
// returns a newly populated Profile.
func parseHeap(b []byte) (p *Profile, err error) {
	s := bufio.NewScanner(bytes.NewBuffer(b))
	if !s.Scan() {
		if err := s.Err(); err != nil {
			return nil, err
		}
		return nil, errUnrecognized
	}
	p = &Profile{}

	sampling := ""
	hasAlloc := false

	// The first line identifies which heap-style profile this is.
	line := s.Text()
	p.PeriodType = &ValueType{Type: "space", Unit: "bytes"}
	if header := heapHeaderRE.FindStringSubmatch(line); header != nil {
		sampling, p.Period, hasAlloc, err = parseHeapHeader(line)
		if err != nil {
			return nil, err
		}
	} else if header = growthHeaderRE.FindStringSubmatch(line); header != nil {
		p.Period = 1
	} else if header = fragmentationHeaderRE.FindStringSubmatch(line); header != nil {
		p.Period = 1
	} else {
		return nil, errUnrecognized
	}

	if hasAlloc {
		// Put alloc before inuse so that default pprof selection
		// will prefer inuse_space.
		p.SampleType = []*ValueType{
			{Type: "alloc_objects", Unit: "count"},
			{Type: "alloc_space", Unit: "bytes"},
			{Type: "inuse_objects", Unit: "count"},
			{Type: "inuse_space", Unit: "bytes"},
		}
	} else {
		p.SampleType = []*ValueType{
			{Type: "objects", Unit: "count"},
			{Type: "space", Unit: "bytes"},
		}
	}

	locs := make(map[uint64]*Location)
	for s.Scan() {
		line := strings.TrimSpace(s.Text())

		if isSpaceOrComment(line) {
			continue
		}

		// The sample section ends at the memory map sentinel.
		if isMemoryMapSentinel(line) {
			break
		}

		value, blocksize, addrs, err := parseHeapSample(line, p.Period, sampling, hasAlloc)
		if err != nil {
			return nil, err
		}

		var sloc []*Location
		for _, addr := range addrs {
			// Addresses from stack traces point to the next instruction after
			// each call. Adjust by -1 to land somewhere on the actual call.
			addr--
			loc := locs[addr]
			if locs[addr] == nil {
				loc = &Location{
					Address: addr,
				}
				p.Location = append(p.Location, loc)
				locs[addr] = loc
			}
			sloc = append(sloc, loc)
		}

		p.Sample = append(p.Sample, &Sample{
			Value:    value,
			Location: sloc,
			NumLabel: map[string][]int64{"bytes": {blocksize}},
		})
	}
	if err := s.Err(); err != nil {
		return nil, err
	}
	if err := parseAdditionalSections(s, p); err != nil {
		return nil, err
	}
	return p, nil
}
+
// parseHeapHeader parses the header line of a heapz profile, returning
// the sampling scheme ("" or "v2"), the sampling period, and whether
// the profile carries separate alloc values in addition to inuse.
func parseHeapHeader(line string) (sampling string, period int64, hasAlloc bool, err error) {
	header := heapHeaderRE.FindStringSubmatch(line)
	if header == nil {
		return "", 0, false, errUnrecognized
	}

	// Optional "/period" suffix after the profile kind.
	if len(header[6]) > 0 {
		if period, err = strconv.ParseInt(header[6], 10, 64); err != nil {
			return "", 0, false, errUnrecognized
		}
	}

	// If the second pair of totals differs from the first (and is not
	// simply zero), the profile tracks allocations separately.
	if (header[3] != header[1] && header[3] != "0") || (header[4] != header[2] && header[4] != "0") {
		hasAlloc = true
	}

	switch header[5] {
	case "heapz_v2", "heap_v2":
		return "v2", period, hasAlloc, nil
	case "heapprofile":
		return "", 1, hasAlloc, nil
	case "heap":
		// NOTE(review): the "heap" variant halves the declared period;
		// presumably its header encodes the period in doubled units —
		// confirm against the producer before relying on this.
		return "v2", period / 2, hasAlloc, nil
	default:
		return "", 0, false, errUnrecognized
	}
}
+
// parseHeapSample parses a single row from a heap profile into a new Sample.
// rate is the sampling period, sampling selects the scaling scheme
// ("" or "v2"), and includeAlloc selects whether alloc values are
// emitted in addition to inuse values.
func parseHeapSample(line string, rate int64, sampling string, includeAlloc bool) (value []int64, blocksize int64, addrs []uint64, err error) {
	sampleData := heapSampleRE.FindStringSubmatch(line)
	if len(sampleData) != 6 {
		return nil, 0, nil, fmt.Errorf("unexpected number of sample values: got %d, want 6", len(sampleData))
	}

	// This is a local-scoped helper function to avoid needing to pass
	// around rate, sampling and many return parameters.
	addValues := func(countString, sizeString string, label string) error {
		count, err := strconv.ParseInt(countString, 10, 64)
		if err != nil {
			return fmt.Errorf("malformed sample: %s: %v", line, err)
		}
		size, err := strconv.ParseInt(sizeString, 10, 64)
		if err != nil {
			return fmt.Errorf("malformed sample: %s: %v", line, err)
		}
		if count == 0 && size != 0 {
			return fmt.Errorf("%s count was 0 but %s bytes was %d", label, label, size)
		}
		if count != 0 {
			// Average block size is computed before any unsampling.
			blocksize = size / count
			if sampling == "v2" {
				count, size = scaleHeapSample(count, size, rate)
			}
		}
		value = append(value, count, size)
		return nil
	}

	if includeAlloc {
		if err := addValues(sampleData[3], sampleData[4], "allocation"); err != nil {
			return nil, 0, nil, err
		}
	}

	if err := addValues(sampleData[1], sampleData[2], "inuse"); err != nil {
		return nil, 0, nil, err
	}

	addrs, err = parseHexAddresses(sampleData[5])
	if err != nil {
		return nil, 0, nil, fmt.Errorf("malformed sample: %s: %v", line, err)
	}

	return value, blocksize, addrs, nil
}
+
+// parseHexAddresses extracts hex numbers from a string, attempts to convert
+// each to an unsigned 64-bit number and returns the resulting numbers as a
+// slice, or an error if the string contains hex numbers which are too large to
+// handle (which means a malformed profile).
+func parseHexAddresses(s string) ([]uint64, error) {
+ hexStrings := hexNumberRE.FindAllString(s, -1)
+ var addrs []uint64
+ for _, s := range hexStrings {
+ if addr, err := strconv.ParseUint(s, 0, 64); err == nil {
+ addrs = append(addrs, addr)
+ } else {
+ return nil, fmt.Errorf("failed to parse as hex 64-bit number: %s", s)
+ }
+ }
+ return addrs, nil
+}
+
// scaleHeapSample adjusts the data from a heapz Sample to
// account for its probability of appearing in the collected
// data. heapz profiles are a sampling of the memory allocations
// requests in a program. We estimate the unsampled value by dividing
// each collected sample by its probability of appearing in the
// profile. heapz v2 profiles rely on a poisson process to determine
// which samples to collect, based on the desired average collection
// rate R. The probability of a sample of size S to appear in that
// profile is 1-exp(-S/R).
func scaleHeapSample(count, size, rate int64) (int64, int64) {
	switch {
	case count == 0 || size == 0:
		return 0, 0
	case rate <= 1:
		// if rate==1 all samples were collected so no adjustment is needed.
		// if rate<1 treat as unknown and skip scaling.
		return count, size
	}

	meanSize := float64(size) / float64(count)
	factor := 1 / (1 - math.Exp(-meanSize/float64(rate)))
	return int64(float64(count) * factor), int64(float64(size) * factor)
}
+
// parseContention parses a mutex or contention profile. There are 2 cases:
// "--- contentionz " for legacy C++ profiles (and backwards compatibility)
// "--- mutex:" or "--- contention:" for profiles generated by the Go runtime.
func parseContention(b []byte) (*Profile, error) {
	s := bufio.NewScanner(bytes.NewBuffer(b))
	if !s.Scan() {
		if err := s.Err(); err != nil {
			return nil, err
		}
		return nil, errUnrecognized
	}

	// Accept any of the three known header prefixes; reject otherwise.
	switch l := s.Text(); {
	case strings.HasPrefix(l, "--- contentionz "):
	case strings.HasPrefix(l, "--- mutex:"):
	case strings.HasPrefix(l, "--- contention:"):
	default:
		return nil, errUnrecognized
	}

	p := &Profile{
		PeriodType: &ValueType{Type: "contentions", Unit: "count"},
		Period:     1,
		SampleType: []*ValueType{
			{Type: "contentions", Unit: "count"},
			{Type: "delay", Unit: "nanoseconds"},
		},
	}

	var cpuHz int64
	// Parse text of the form "attribute = value" before the samples.
	const delimiter = "="
	for s.Scan() {
		line := s.Text()
		if line = strings.TrimSpace(line); isSpaceOrComment(line) {
			continue
		}
		if strings.HasPrefix(line, "---") {
			break
		}
		attr := strings.SplitN(line, delimiter, 2)
		if len(attr) != 2 {
			// First non-attribute line: fall through to sample parsing.
			break
		}
		key, val := strings.TrimSpace(attr[0]), strings.TrimSpace(attr[1])
		var err error
		switch key {
		case "cycles/second":
			if cpuHz, err = strconv.ParseInt(val, 0, 64); err != nil {
				return nil, errUnrecognized
			}
		case "sampling period":
			if p.Period, err = strconv.ParseInt(val, 0, 64); err != nil {
				return nil, errUnrecognized
			}
		case "ms since reset":
			ms, err := strconv.ParseInt(val, 0, 64)
			if err != nil {
				return nil, errUnrecognized
			}
			p.DurationNanos = ms * 1000 * 1000
		case "format":
			// CPP contentionz profiles don't have format.
			return nil, errUnrecognized
		case "resolution":
			// CPP contentionz profiles don't have resolution.
			return nil, errUnrecognized
		case "discarded samples":
		default:
			return nil, errUnrecognized
		}
	}
	if err := s.Err(); err != nil {
		return nil, err
	}

	locs := make(map[uint64]*Location)
	// Note: the loop processes the scanner's current line first, since
	// the attribute loop above may already have read a sample line.
	for {
		line := strings.TrimSpace(s.Text())
		if strings.HasPrefix(line, "---") {
			break
		}
		if !isSpaceOrComment(line) {
			value, addrs, err := parseContentionSample(line, p.Period, cpuHz)
			if err != nil {
				return nil, err
			}
			var sloc []*Location
			for _, addr := range addrs {
				// Addresses from stack traces point to the next instruction after
				// each call. Adjust by -1 to land somewhere on the actual call.
				addr--
				loc := locs[addr]
				if locs[addr] == nil {
					loc = &Location{
						Address: addr,
					}
					p.Location = append(p.Location, loc)
					locs[addr] = loc
				}
				sloc = append(sloc, loc)
			}
			p.Sample = append(p.Sample, &Sample{
				Value:    value,
				Location: sloc,
			})
		}
		if !s.Scan() {
			break
		}
	}
	if err := s.Err(); err != nil {
		return nil, err
	}

	if err := parseAdditionalSections(s, p); err != nil {
		return nil, err
	}

	return p, nil
}
+
// parseContentionSample parses a single row from a contention profile
// into a new Sample.
func parseContentionSample(line string, period, cpuHz int64) (value []int64, addrs []uint64, err error) {
	sampleData := contentionSampleRE.FindStringSubmatch(line)
	if sampleData == nil {
		return nil, nil, errUnrecognized
	}

	// v1 is the delay (scaled below), v2 the contention count.
	v1, err := strconv.ParseInt(sampleData[1], 10, 64)
	if err != nil {
		return nil, nil, fmt.Errorf("malformed sample: %s: %v", line, err)
	}
	v2, err := strconv.ParseInt(sampleData[2], 10, 64)
	if err != nil {
		return nil, nil, fmt.Errorf("malformed sample: %s: %v", line, err)
	}

	// Unsample values if period and cpuHz are available.
	// - Delays are scaled to cycles and then to nanoseconds.
	// - Contentions are scaled to cycles.
	if period > 0 {
		if cpuHz > 0 {
			cpuGHz := float64(cpuHz) / 1e9
			v1 = int64(float64(v1) * float64(period) / cpuGHz)
		}
		v2 = v2 * period
	}

	// Value order matches the profile's SampleType: contentions, delay.
	value = []int64{v2, v1}
	addrs, err = parseHexAddresses(sampleData[3])
	if err != nil {
		return nil, nil, fmt.Errorf("malformed sample: %s: %v", line, err)
	}

	return value, addrs, nil
}
+
// parseThread parses a Threadz profile and returns a new Profile.
//
// The input may carry a threadz-style header (skipped up to the first
// stack trace) or begin directly with per-thread "---" headers. Each
// thread contributes one sample with value 1; a "same as previous
// thread" trace bumps the previous sample's count instead.
func parseThread(b []byte) (*Profile, error) {
	s := bufio.NewScanner(bytes.NewBuffer(b))
	// Skip past comments and empty lines seeking a real header.
	for s.Scan() && isSpaceOrComment(s.Text()) {
	}

	line := s.Text()
	if m := threadzStartRE.FindStringSubmatch(line); m != nil {
		// Advance over initial comments until first stack trace.
		for s.Scan() {
			if line = s.Text(); isMemoryMapSentinel(line) || strings.HasPrefix(line, "-") {
				break
			}
		}
	} else if t := threadStartRE.FindStringSubmatch(line); len(t) != 4 {
		// Neither a threadz header nor a thread header: unknown format.
		return nil, errUnrecognized
	}

	p := &Profile{
		SampleType: []*ValueType{{Type: "thread", Unit: "count"}},
		PeriodType: &ValueType{Type: "thread", Unit: "count"},
		Period:     1,
	}

	// locs canonicalizes addresses so identical frames share a Location.
	locs := make(map[uint64]*Location)
	// Recognize each thread and populate profile samples.
	for !isMemoryMapSentinel(line) {
		if strings.HasPrefix(line, "---- no stack trace for") {
			// No more stack traces to parse; stop here.
			line = ""
			break
		}
		if t := threadStartRE.FindStringSubmatch(line); len(t) != 4 {
			return nil, errUnrecognized
		}

		var addrs []uint64
		var err error
		// parseThreadSample consumes the traceback and returns the
		// header line that follows it.
		line, addrs, err = parseThreadSample(s)
		if err != nil {
			return nil, err
		}
		if len(addrs) == 0 {
			// We got a --same as previous threads--. Bump counters.
			if len(p.Sample) > 0 {
				s := p.Sample[len(p.Sample)-1]
				s.Value[0]++
			}
			continue
		}

		var sloc []*Location
		for i, addr := range addrs {
			// Addresses from stack traces point to the next instruction after
			// each call. Adjust by -1 to land somewhere on the actual call
			// (except for the leaf, which is not a call).
			if i > 0 {
				addr--
			}
			loc := locs[addr]
			if locs[addr] == nil {
				loc = &Location{
					Address: addr,
				}
				p.Location = append(p.Location, loc)
				locs[addr] = loc
			}
			sloc = append(sloc, loc)
		}

		p.Sample = append(p.Sample, &Sample{
			Value:    []int64{1},
			Location: sloc,
		})
	}

	// Consume any trailing sections, e.g. the memory map.
	if err := parseAdditionalSections(s, p); err != nil {
		return nil, err
	}

	cleanupDuplicateLocations(p)
	return p, nil
}
+
// parseThreadSample parses a symbolized or unsymbolized stack trace.
// Returns the first line after the traceback, the sample (or nil if
// it hits a 'same-as-previous' marker) and an error.
func parseThreadSample(s *bufio.Scanner) (nextl string, addrs []uint64, err error) {
	var line string
	sameAsPrevious := false
	for s.Scan() {
		line = strings.TrimSpace(s.Text())
		if line == "" {
			// Blank lines within a traceback are ignored.
			continue
		}

		if strings.HasPrefix(line, "---") {
			// Next thread header or section: traceback is done. line
			// holds the header and is returned to the caller.
			break
		}
		if strings.Contains(line, "same as previous thread") {
			// Remember the marker but keep consuming the block.
			sameAsPrevious = true
			continue
		}

		curAddrs, err := parseHexAddresses(line)
		if err != nil {
			return "", nil, fmt.Errorf("malformed sample: %s: %v", line, err)
		}
		addrs = append(addrs, curAddrs...)
	}
	if err := s.Err(); err != nil {
		return "", nil, err
	}
	if sameAsPrevious {
		// A nil address list signals "same as previous thread".
		return line, nil, nil
	}
	return line, addrs, nil
}
+
+// parseAdditionalSections parses any additional sections in the
+// profile, ignoring any unrecognized sections.
+func parseAdditionalSections(s *bufio.Scanner, p *Profile) error {
+ for !isMemoryMapSentinel(s.Text()) && s.Scan() {
+ }
+ if err := s.Err(); err != nil {
+ return err
+ }
+ return p.ParseMemoryMapFromScanner(s)
+}
+
+// ParseProcMaps parses a memory map in the format of /proc/self/maps.
+// ParseMemoryMap should be called after setting on a profile to
+// associate locations to the corresponding mapping based on their
+// address.
+func ParseProcMaps(rd io.Reader) ([]*Mapping, error) {
+ s := bufio.NewScanner(rd)
+ return parseProcMapsFromScanner(s)
+}
+
// parseProcMapsFromScanner reads mapping entries from a scanner over a
// /proc/self/maps-style listing. Lines of the form "attr=value" define
// substitutions: "$attr" is replaced by "value" on all subsequent
// lines. Unrecognized lines are skipped.
func parseProcMapsFromScanner(s *bufio.Scanner) ([]*Mapping, error) {
	var mapping []*Mapping

	// attrs accumulates "$name" -> value pairs; r applies them to each
	// line before it is parsed.
	var attrs []string
	const delimiter = "="
	r := strings.NewReplacer()
	for s.Scan() {
		line := r.Replace(removeLoggingInfo(s.Text()))
		m, err := parseMappingEntry(line)
		if err != nil {
			if err == errUnrecognized {
				// Recognize assignments of the form: attr=value, and replace
				// $attr with value on subsequent mappings.
				if attr := strings.SplitN(line, delimiter, 2); len(attr) == 2 {
					attrs = append(attrs, "$"+strings.TrimSpace(attr[0]), strings.TrimSpace(attr[1]))
					r = strings.NewReplacer(attrs...)
				}
				// Ignore any unrecognized entries
				continue
			}
			return nil, err
		}
		if m == nil {
			// Recognized but skippable entry (e.g. non-executable).
			continue
		}
		mapping = append(mapping, m)
	}
	if err := s.Err(); err != nil {
		return nil, err
	}
	return mapping, nil
}
+
+// removeLoggingInfo detects and removes log prefix entries generated
+// by the glog package. If no logging prefix is detected, the string
+// is returned unmodified.
+func removeLoggingInfo(line string) string {
+ if match := logInfoRE.FindStringIndex(line); match != nil {
+ return line[match[1]:]
+ }
+ return line
+}
+
// ParseMemoryMap parses a memory map in the format of
// /proc/self/maps, and overrides the mappings in the current profile.
// It renumbers the samples and locations in the profile correspondingly.
func (p *Profile) ParseMemoryMap(rd io.Reader) error {
	return p.ParseMemoryMapFromScanner(bufio.NewScanner(rd))
}

// ParseMemoryMapFromScanner parses a memory map in the format of
// /proc/self/maps or a variety of legacy format, and overrides the
// mappings in the current profile. It renumbers the samples and
// locations in the profile correspondingly.
func (p *Profile) ParseMemoryMapFromScanner(s *bufio.Scanner) error {
	mapping, err := parseProcMapsFromScanner(s)
	if err != nil {
		return err
	}
	p.Mapping = append(p.Mapping, mapping...)
	p.massageMappings()
	// Renumber all entity IDs so they remain dense and consistent
	// after mappings were appended and possibly reordered.
	p.remapLocationIDs()
	p.remapFunctionIDs()
	p.remapMappingIDs()
	return nil
}
+
+func parseMappingEntry(l string) (*Mapping, error) {
+ var start, end, perm, file, offset, buildID string
+ if me := procMapsRE.FindStringSubmatch(l); len(me) == 6 {
+ start, end, perm, offset, file = me[1], me[2], me[3], me[4], me[5]
+ } else if me := briefMapsRE.FindStringSubmatch(l); len(me) == 7 {
+ start, end, perm, file, offset, buildID = me[1], me[2], me[3], me[4], me[5], me[6]
+ } else {
+ return nil, errUnrecognized
+ }
+
+ var err error
+ mapping := &Mapping{
+ File: file,
+ BuildID: buildID,
+ }
+ if perm != "" && !strings.Contains(perm, "x") {
+ // Skip non-executable entries.
+ return nil, nil
+ }
+ if mapping.Start, err = strconv.ParseUint(start, 16, 64); err != nil {
+ return nil, errUnrecognized
+ }
+ if mapping.Limit, err = strconv.ParseUint(end, 16, 64); err != nil {
+ return nil, errUnrecognized
+ }
+ if offset != "" {
+ if mapping.Offset, err = strconv.ParseUint(offset, 16, 64); err != nil {
+ return nil, errUnrecognized
+ }
+ }
+ return mapping, nil
+}
+
// memoryMapSentinels are the markers that introduce the memory map
// section of a legacy profile.
var memoryMapSentinels = []string{
	"--- Memory map: ---",
	"MAPPED_LIBRARIES:",
}

// isMemoryMapSentinel returns true if the string contains one of the
// known sentinels for memory map information.
func isMemoryMapSentinel(line string) bool {
	for _, s := range memoryMapSentinels {
		if strings.Contains(line, s) {
			return true
		}
	}
	return false
}
+
+func (p *Profile) addLegacyFrameInfo() {
+ switch {
+ case isProfileType(p, heapzSampleTypes):
+ p.DropFrames, p.KeepFrames = allocRxStr, allocSkipRxStr
+ case isProfileType(p, contentionzSampleTypes):
+ p.DropFrames, p.KeepFrames = lockRxStr, ""
+ default:
+ p.DropFrames, p.KeepFrames = cpuProfilerRxStr, ""
+ }
+}
+
// heapzSampleTypes lists the sample-type name signatures that identify
// a legacy profile as a heap (allocation) profile.
var heapzSampleTypes = [][]string{
	{"allocations", "size"}, // early Go pprof profiles
	{"objects", "space"},
	{"inuse_objects", "inuse_space"},
	{"alloc_objects", "alloc_space"},
	{"alloc_objects", "alloc_space", "inuse_objects", "inuse_space"}, // Go pprof legacy profiles
}

// contentionzSampleTypes lists the sample-type name signatures that
// identify a legacy profile as a contention (lock) profile.
var contentionzSampleTypes = [][]string{
	{"contentions", "delay"},
}
+
+func isProfileType(p *Profile, types [][]string) bool {
+ st := p.SampleType
+nextType:
+ for _, t := range types {
+ if len(st) != len(t) {
+ continue
+ }
+
+ for i := range st {
+ if st[i].Type != t[i] {
+ continue nextType
+ }
+ }
+ return true
+ }
+ return false
+}
+
// allocRxStr matches the function names of memory-allocation entry
// points; it seeds DropFrames for heap profiles.
var allocRxStr = strings.Join([]string{
	// POSIX entry points.
	`calloc`,
	`cfree`,
	`malloc`,
	`free`,
	`memalign`,
	`do_memalign`,
	`(__)?posix_memalign`,
	`pvalloc`,
	`valloc`,
	`realloc`,

	// TC malloc.
	`tcmalloc::.*`,
	`tc_calloc`,
	`tc_cfree`,
	`tc_malloc`,
	`tc_free`,
	`tc_memalign`,
	`tc_posix_memalign`,
	`tc_pvalloc`,
	`tc_valloc`,
	`tc_realloc`,
	`tc_new`,
	`tc_delete`,
	`tc_newarray`,
	`tc_deletearray`,
	`tc_new_nothrow`,
	`tc_newarray_nothrow`,

	// Memory-allocation routines on OS X.
	`malloc_zone_malloc`,
	`malloc_zone_calloc`,
	`malloc_zone_valloc`,
	`malloc_zone_realloc`,
	`malloc_zone_memalign`,
	`malloc_zone_free`,

	// Go runtime
	`runtime\..*`,

	// Other misc. memory allocation routines
	`BaseArena::.*`,
	`(::)?do_malloc_no_errno`,
	`(::)?do_malloc_pages`,
	`(::)?do_malloc`,
	`DoSampledAllocation`,
	`MallocedMemBlock::MallocedMemBlock`,
	`_M_allocate`,
	`__builtin_(vec_)?delete`,
	`__builtin_(vec_)?new`,
	`__gnu_cxx::new_allocator::allocate`,
	`__libc_malloc`,
	`__malloc_alloc_template::allocate`,
	`allocate`,
	`cpp_alloc`,
	`operator new(\[\])?`,
	`simple_alloc::allocate`,
}, `|`)

// allocSkipRxStr matches Go runtime frames that must be kept even
// though allocRxStr would drop them; it seeds KeepFrames for heap
// profiles.
var allocSkipRxStr = strings.Join([]string{
	// Preserve Go runtime frames that appear in the middle/bottom of
	// the stack.
	`runtime\.panic`,
	`runtime\.reflectcall`,
	`runtime\.call[0-9]*`,
}, `|`)

// cpuProfilerRxStr matches profiler-internal frames to drop from CPU
// profiles.
var cpuProfilerRxStr = strings.Join([]string{
	`ProfileData::Add`,
	`ProfileData::prof_handler`,
	`CpuProfiler::prof_handler`,
	`__pthread_sighandler`,
	`__restore`,
}, `|`)

// lockRxStr matches lock-bookkeeping frames to drop from contention
// profiles.
var lockRxStr = strings.Join([]string{
	`RecordLockProfileData`,
	`(base::)?RecordLockProfileData.*`,
	`(base::)?SubmitMutexProfileData.*`,
	`(base::)?SubmitSpinLockProfileData.*`,
	`(base::Mutex::)?AwaitCommon.*`,
	`(base::Mutex::)?Unlock.*`,
	`(base::Mutex::)?UnlockSlow.*`,
	`(base::Mutex::)?ReaderUnlock.*`,
	`(base::MutexLock::)?~MutexLock.*`,
	`(Mutex::)?AwaitCommon.*`,
	`(Mutex::)?Unlock.*`,
	`(Mutex::)?UnlockSlow.*`,
	`(Mutex::)?ReaderUnlock.*`,
	`(MutexLock::)?~MutexLock.*`,
	`(SpinLock::)?Unlock.*`,
	`(SpinLock::)?SlowUnlock.*`,
	`(SpinLockHolder::)?~SpinLockHolder.*`,
}, `|`)
diff --git a/src/cmd/vendor/github.com/google/pprof/profile/merge.go b/src/cmd/vendor/github.com/google/pprof/profile/merge.go
new file mode 100644
index 0000000..4dcc27f
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/profile/merge.go
@@ -0,0 +1,479 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package profile
+
+import (
+ "fmt"
+ "sort"
+ "strconv"
+ "strings"
+)
+
+// Compact performs garbage collection on a profile to remove any
+// unreferenced fields. This is useful to reduce the size of a profile
+// after samples or locations have been removed.
+func (p *Profile) Compact() *Profile {
+ p, _ = Merge([]*Profile{p})
+ return p
+}
+
// Merge merges all the profiles in profs into a single Profile.
// Returns a new profile independent of the input profiles. The merged
// profile is compacted to eliminate unused samples, locations,
// functions and mappings. Profiles must have identical profile sample
// and period types or the merge will fail. profile.Period of the
// resulting profile will be the maximum of all profiles, and
// profile.TimeNanos will be the earliest nonzero one.
func Merge(srcs []*Profile) (*Profile, error) {
	if len(srcs) == 0 {
		return nil, fmt.Errorf("no profiles to merge")
	}
	p, err := combineHeaders(srcs)
	if err != nil {
		return nil, err
	}

	pm := &profileMerger{
		p:         p,
		samples:   make(map[sampleKey]*Sample, len(srcs[0].Sample)),
		locations: make(map[locationKey]*Location, len(srcs[0].Location)),
		functions: make(map[functionKey]*Function, len(srcs[0].Function)),
		mappings:  make(map[mappingKey]*Mapping, len(srcs[0].Mapping)),
	}

	for _, src := range srcs {
		// Clear the profile-specific hash tables
		pm.locationsByID = make(map[uint64]*Location, len(src.Location))
		pm.functionsByID = make(map[uint64]*Function, len(src.Function))
		pm.mappingsByID = make(map[uint64]mapInfo, len(src.Mapping))

		if len(pm.mappings) == 0 && len(src.Mapping) > 0 {
			// The Mapping list has the property that the first mapping
			// represents the main binary. Take the first Mapping we see,
			// otherwise the operations below will add mappings in an
			// arbitrary order.
			pm.mapMapping(src.Mapping[0])
		}

		for _, s := range src.Sample {
			if !isZeroSample(s) {
				pm.mapSample(s)
			}
		}
	}

	// Merged values can still sum to zero (e.g. when sources carry
	// values of opposite sign); re-merging garbage-collects them.
	for _, s := range p.Sample {
		if isZeroSample(s) {
			// If there are any zero samples, re-merge the profile to GC
			// them.
			return Merge([]*Profile{p})
		}
	}

	return p, nil
}
+
+// Normalize normalizes the source profile by multiplying each value in profile by the
+// ratio of the sum of the base profile's values of that sample type to the sum of the
+// source profile's value of that sample type.
+func (p *Profile) Normalize(pb *Profile) error {
+
+ if err := p.compatible(pb); err != nil {
+ return err
+ }
+
+ baseVals := make([]int64, len(p.SampleType))
+ for _, s := range pb.Sample {
+ for i, v := range s.Value {
+ baseVals[i] += v
+ }
+ }
+
+ srcVals := make([]int64, len(p.SampleType))
+ for _, s := range p.Sample {
+ for i, v := range s.Value {
+ srcVals[i] += v
+ }
+ }
+
+ normScale := make([]float64, len(baseVals))
+ for i := range baseVals {
+ if srcVals[i] == 0 {
+ normScale[i] = 0.0
+ } else {
+ normScale[i] = float64(baseVals[i]) / float64(srcVals[i])
+ }
+ }
+ p.ScaleN(normScale)
+ return nil
+}
+
+func isZeroSample(s *Sample) bool {
+ for _, v := range s.Value {
+ if v != 0 {
+ return false
+ }
+ }
+ return true
+}
+
// profileMerger holds the state used while folding a set of source
// profiles into a single output profile.
type profileMerger struct {
	p *Profile

	// Memoization tables within a profile. These map source-profile
	// IDs to merged entities and are reset for each source profile.
	locationsByID map[uint64]*Location
	functionsByID map[uint64]*Function
	mappingsByID  map[uint64]mapInfo

	// Memoization tables for profile entities. These are keyed by the
	// entities' contents and persist across all source profiles.
	samples   map[sampleKey]*Sample
	locations map[locationKey]*Location
	functions map[functionKey]*Function
	mappings  map[mappingKey]*Mapping
}

// mapInfo records the merged mapping corresponding to a source
// mapping, plus the address offset between the two address spaces.
type mapInfo struct {
	m      *Mapping
	offset int64
}
+
// mapSample returns the sample in the merged profile corresponding to
// src, creating it if needed. Samples that share a key (same remapped
// stack and labels) are accumulated into one merged sample.
func (pm *profileMerger) mapSample(src *Sample) *Sample {
	s := &Sample{
		Location: make([]*Location, len(src.Location)),
		Value:    make([]int64, len(src.Value)),
		Label:    make(map[string][]string, len(src.Label)),
		NumLabel: make(map[string][]int64, len(src.NumLabel)),
		// NumUnit keys parallel NumLabel keys, hence the same size hint.
		NumUnit: make(map[string][]string, len(src.NumLabel)),
	}
	for i, l := range src.Location {
		s.Location[i] = pm.mapLocation(l)
	}
	// Deep-copy labels so the merged profile is independent of src.
	for k, v := range src.Label {
		vv := make([]string, len(v))
		copy(vv, v)
		s.Label[k] = vv
	}
	for k, v := range src.NumLabel {
		u := src.NumUnit[k]
		vv := make([]int64, len(v))
		uu := make([]string, len(u))
		copy(vv, v)
		copy(uu, u)
		s.NumLabel[k] = vv
		s.NumUnit[k] = uu
	}
	// Check memoization table. Must be done on the remapped location to
	// account for the remapped mapping. Add current values to the
	// existing sample.
	k := s.key()
	if ss, ok := pm.samples[k]; ok {
		for i, v := range src.Value {
			ss.Value[i] += v
		}
		return ss
	}
	// First sample with this key: copy values and register it.
	copy(s.Value, src.Value)
	pm.samples[k] = s
	pm.p.Sample = append(pm.p.Sample, s)
	return s
}
+
// key generates sampleKey to be used as a key for maps.
func (sample *Sample) key() sampleKey {
	// Location IDs, in stack order, identify the call stack.
	ids := make([]string, len(sample.Location))
	for i, l := range sample.Location {
		ids[i] = strconv.FormatUint(l.ID, 16)
	}

	// Labels are sorted so map iteration order does not leak into the
	// key. Each entry is rendered with %q so keys/values cannot be
	// confused across entry boundaries.
	labels := make([]string, 0, len(sample.Label))
	for k, v := range sample.Label {
		labels = append(labels, fmt.Sprintf("%q%q", k, v))
	}
	sort.Strings(labels)

	numlabels := make([]string, 0, len(sample.NumLabel))
	for k, v := range sample.NumLabel {
		numlabels = append(numlabels, fmt.Sprintf("%q%x%x", k, v, sample.NumUnit[k]))
	}
	sort.Strings(numlabels)

	return sampleKey{
		strings.Join(ids, "|"),
		strings.Join(labels, ""),
		strings.Join(numlabels, ""),
	}
}

// sampleKey identifies a sample by its stack and its (numeric) labels.
type sampleKey struct {
	locations string
	labels    string
	numlabels string
}
+
+func (pm *profileMerger) mapLocation(src *Location) *Location {
+ if src == nil {
+ return nil
+ }
+
+ if l, ok := pm.locationsByID[src.ID]; ok {
+ pm.locationsByID[src.ID] = l
+ return l
+ }
+
+ mi := pm.mapMapping(src.Mapping)
+ l := &Location{
+ ID: uint64(len(pm.p.Location) + 1),
+ Mapping: mi.m,
+ Address: uint64(int64(src.Address) + mi.offset),
+ Line: make([]Line, len(src.Line)),
+ IsFolded: src.IsFolded,
+ }
+ for i, ln := range src.Line {
+ l.Line[i] = pm.mapLine(ln)
+ }
+ // Check memoization table. Must be done on the remapped location to
+ // account for the remapped mapping ID.
+ k := l.key()
+ if ll, ok := pm.locations[k]; ok {
+ pm.locationsByID[src.ID] = ll
+ return ll
+ }
+ pm.locationsByID[src.ID] = l
+ pm.locations[k] = l
+ pm.p.Location = append(pm.p.Location, l)
+ return l
+}
+
// key generates locationKey to be used as a key for maps.
func (l *Location) key() locationKey {
	key := locationKey{
		addr:     l.Address,
		isFolded: l.IsFolded,
	}
	if l.Mapping != nil {
		// Normalizes address to handle address space randomization.
		key.addr -= l.Mapping.Start
		key.mappingID = l.Mapping.ID
	}
	// Encode the line table as hex "funcID|line|funcID|line|...";
	// a nil Function leaves its slot as the empty string.
	lines := make([]string, len(l.Line)*2)
	for i, line := range l.Line {
		if line.Function != nil {
			lines[i*2] = strconv.FormatUint(line.Function.ID, 16)
		}
		lines[i*2+1] = strconv.FormatInt(line.Line, 16)
	}
	key.lines = strings.Join(lines, "|")
	return key
}

// locationKey identifies a location by its normalized address, its
// mapping and the contents of its line table.
type locationKey struct {
	addr, mappingID uint64
	lines           string
	isFolded        bool
}
+
// mapMapping returns the mapping info in the merged profile that
// corresponds to src, creating the merged mapping if needed. The
// returned offset translates addresses from src's address space into
// the merged mapping's.
func (pm *profileMerger) mapMapping(src *Mapping) mapInfo {
	if src == nil {
		return mapInfo{}
	}

	// Per-profile fast path by source ID.
	if mi, ok := pm.mappingsByID[src.ID]; ok {
		return mi
	}

	// Check memoization tables.
	mk := src.key()
	if m, ok := pm.mappings[mk]; ok {
		// Same mapping seen in an earlier profile; the offset is the
		// delta between the two load addresses.
		mi := mapInfo{m, int64(m.Start) - int64(src.Start)}
		pm.mappingsByID[src.ID] = mi
		return mi
	}
	m := &Mapping{
		ID:              uint64(len(pm.p.Mapping) + 1),
		Start:           src.Start,
		Limit:           src.Limit,
		Offset:          src.Offset,
		File:            src.File,
		BuildID:         src.BuildID,
		HasFunctions:    src.HasFunctions,
		HasFilenames:    src.HasFilenames,
		HasLineNumbers:  src.HasLineNumbers,
		HasInlineFrames: src.HasInlineFrames,
	}
	pm.p.Mapping = append(pm.p.Mapping, m)

	// Update memoization tables.
	pm.mappings[mk] = m
	mi := mapInfo{m, 0}
	pm.mappingsByID[src.ID] = mi
	return mi
}
+
// key generates encoded strings of Mapping to be used as a key for
// maps.
func (m *Mapping) key() mappingKey {
	// Normalize addresses to handle address space randomization.
	// Round up to next 4K boundary to avoid minor discrepancies.
	const mapsizeRounding = 0x1000

	size := m.Limit - m.Start
	size = size + mapsizeRounding - 1
	size = size - (size % mapsizeRounding)
	key := mappingKey{
		size:   size,
		offset: m.Offset,
	}

	switch {
	case m.BuildID != "":
		// Prefer the build ID as the identity when it is present.
		key.buildIDOrFile = m.BuildID
	case m.File != "":
		key.buildIDOrFile = m.File
	default:
		// A mapping containing neither build ID nor file name is a fake mapping. A
		// key with empty buildIDOrFile is used for fake mappings so that they are
		// treated as the same mapping during merging.
	}
	return key
}

// mappingKey identifies a mapping by its rounded size, its offset and
// its build ID or file name.
type mappingKey struct {
	size, offset  uint64
	buildIDOrFile string
}
+
+func (pm *profileMerger) mapLine(src Line) Line {
+ ln := Line{
+ Function: pm.mapFunction(src.Function),
+ Line: src.Line,
+ }
+ return ln
+}
+
+func (pm *profileMerger) mapFunction(src *Function) *Function {
+ if src == nil {
+ return nil
+ }
+ if f, ok := pm.functionsByID[src.ID]; ok {
+ return f
+ }
+ k := src.key()
+ if f, ok := pm.functions[k]; ok {
+ pm.functionsByID[src.ID] = f
+ return f
+ }
+ f := &Function{
+ ID: uint64(len(pm.p.Function) + 1),
+ Name: src.Name,
+ SystemName: src.SystemName,
+ Filename: src.Filename,
+ StartLine: src.StartLine,
+ }
+ pm.functions[k] = f
+ pm.functionsByID[src.ID] = f
+ pm.p.Function = append(pm.p.Function, f)
+ return f
+}
+
+// key generates a struct to be used as a key for maps.
+func (f *Function) key() functionKey {
+ return functionKey{
+ f.StartLine,
+ f.Name,
+ f.SystemName,
+ f.Filename,
+ }
+}
+
+type functionKey struct {
+ startLine int64
+ name, systemName, fileName string
+}
+
// combineHeaders checks that all profiles can be merged and returns
// their combined profile.
func combineHeaders(srcs []*Profile) (*Profile, error) {
	// Every profile must be compatible with the first.
	for _, s := range srcs[1:] {
		if err := srcs[0].compatible(s); err != nil {
			return nil, err
		}
	}

	var timeNanos, durationNanos, period int64
	var comments []string
	seenComments := map[string]bool{}
	var defaultSampleType string
	for _, s := range srcs {
		// Earliest nonzero start time wins.
		if timeNanos == 0 || s.TimeNanos < timeNanos {
			timeNanos = s.TimeNanos
		}
		// Durations accumulate; period is the maximum seen.
		durationNanos += s.DurationNanos
		if period == 0 || period < s.Period {
			period = s.Period
		}
		// Deduplicate comments, preserving first-seen order.
		for _, c := range s.Comments {
			if seen := seenComments[c]; !seen {
				comments = append(comments, c)
				seenComments[c] = true
			}
		}
		// First nonempty default sample type wins.
		if defaultSampleType == "" {
			defaultSampleType = s.DefaultSampleType
		}
	}

	p := &Profile{
		SampleType: make([]*ValueType, len(srcs[0].SampleType)),

		DropFrames: srcs[0].DropFrames,
		KeepFrames: srcs[0].KeepFrames,

		TimeNanos:     timeNanos,
		DurationNanos: durationNanos,
		PeriodType:    srcs[0].PeriodType,
		Period:        period,

		Comments:          comments,
		DefaultSampleType: defaultSampleType,
	}
	copy(p.SampleType, srcs[0].SampleType)
	return p, nil
}
+
+// compatible determines if two profiles can be compared/merged.
+// returns nil if the profiles are compatible; otherwise an error with
+// details on the incompatibility.
+func (p *Profile) compatible(pb *Profile) error {
+ if !equalValueType(p.PeriodType, pb.PeriodType) {
+ return fmt.Errorf("incompatible period types %v and %v", p.PeriodType, pb.PeriodType)
+ }
+
+ if len(p.SampleType) != len(pb.SampleType) {
+ return fmt.Errorf("incompatible sample types %v and %v", p.SampleType, pb.SampleType)
+ }
+
+ for i := range p.SampleType {
+ if !equalValueType(p.SampleType[i], pb.SampleType[i]) {
+ return fmt.Errorf("incompatible sample types %v and %v", p.SampleType, pb.SampleType)
+ }
+ }
+ return nil
+}
+
+// equalValueType returns true if the two value types are semantically
+// equal. It ignores the internal fields used during encode/decode.
+func equalValueType(st1, st2 *ValueType) bool {
+ return st1.Type == st2.Type && st1.Unit == st2.Unit
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/profile/profile.go b/src/cmd/vendor/github.com/google/pprof/profile/profile.go
new file mode 100644
index 0000000..d94d8b3
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/profile/profile.go
@@ -0,0 +1,793 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package profile provides a representation of profile.proto and
+// methods to encode/decode profiles in this format.
+package profile
+
+import (
+ "bytes"
+ "compress/gzip"
+ "fmt"
+ "io"
+ "io/ioutil"
+ "path/filepath"
+ "regexp"
+ "sort"
+ "strings"
+ "sync"
+ "time"
+)
+
// Profile is an in-memory representation of profile.proto.
type Profile struct {
	SampleType        []*ValueType
	DefaultSampleType string
	Sample            []*Sample
	Mapping           []*Mapping
	Location          []*Location
	Function          []*Function
	Comments          []string

	// DropFrames/KeepFrames hold regular-expression strings selecting
	// frames to drop/keep (see addLegacyFrameInfo for examples).
	DropFrames string
	KeepFrames string

	TimeNanos     int64
	DurationNanos int64
	PeriodType    *ValueType
	Period        int64

	// The following fields are modified during encoding and copying,
	// so are protected by a Mutex. The *X fields appear to be
	// string-table indices maintained by preEncode/postDecode —
	// NOTE(review): confirm against the encode/decode code.
	encodeMu sync.Mutex

	commentX           []int64
	dropFramesX        int64
	keepFramesX        int64
	stringTable        []string
	defaultSampleTypeX int64
}

// ValueType corresponds to Profile.ValueType
type ValueType struct {
	Type string // cpu, wall, inuse_space, etc
	Unit string // seconds, nanoseconds, bytes, etc

	typeX int64
	unitX int64
}

// Sample corresponds to Profile.Sample
type Sample struct {
	Location []*Location
	Value    []int64
	Label    map[string][]string
	NumLabel map[string][]int64
	// NumUnit carries the units for NumLabel entries, keyed in
	// parallel with NumLabel.
	NumUnit map[string][]string

	locationIDX []uint64
	labelX      []label
}

// label corresponds to Profile.Label
type label struct {
	keyX int64
	// Exactly one of the two following values must be set
	strX int64
	numX int64 // Integer value for this label
	// can be set if numX has value
	unitX int64
}
+
// Mapping corresponds to Profile.Mapping
type Mapping struct {
	ID              uint64
	Start           uint64 // first address covered by the mapping
	Limit           uint64 // first address past the mapping
	Offset          uint64
	File            string
	BuildID         string
	HasFunctions    bool
	HasFilenames    bool
	HasLineNumbers  bool
	HasInlineFrames bool

	// Unexported fields are encode/decode scratch state.
	fileX    int64
	buildIDX int64
}

// Location corresponds to Profile.Location
type Location struct {
	ID      uint64
	Mapping *Mapping
	Address uint64
	Line    []Line
	IsFolded bool

	mappingIDX uint64
}

// Line corresponds to Profile.Line
type Line struct {
	Function *Function
	Line     int64

	functionIDX uint64
}

// Function corresponds to Profile.Function
type Function struct {
	ID         uint64
	Name       string
	SystemName string
	Filename   string
	StartLine  int64

	nameX       int64
	systemNameX int64
	filenameX   int64
}
+
// Parse parses a profile and checks for its validity. The input
// may be a gzip-compressed encoded protobuf or one of many legacy
// profile formats which may be unsupported in the future.
func Parse(r io.Reader) (*Profile, error) {
	// Read the whole input up front: format detection may need to
	// retry parsing from the start of the data.
	data, err := ioutil.ReadAll(r)
	if err != nil {
		return nil, err
	}
	return ParseData(data)
}
+
// ParseData parses a profile from a buffer and checks for its
// validity.
func ParseData(data []byte) (*Profile, error) {
	var p *Profile
	var err error
	// 0x1f 0x8b is the gzip magic number; decompress transparently.
	if len(data) >= 2 && data[0] == 0x1f && data[1] == 0x8b {
		// Note: this err (declared by :=) shadows the outer err; both
		// the NewReader and ReadAll failures are caught by the check
		// below, all within this block.
		gz, err := gzip.NewReader(bytes.NewBuffer(data))
		if err == nil {
			data, err = ioutil.ReadAll(gz)
		}
		if err != nil {
			return nil, fmt.Errorf("decompressing profile: %v", err)
		}
	}
	// Try the protobuf format first, then fall back to legacy formats.
	if p, err = ParseUncompressed(data); err != nil && err != errNoData && err != errConcatProfile {
		p, err = parseLegacy(data)
	}

	if err != nil {
		return nil, fmt.Errorf("parsing profile: %v", err)
	}

	if err := p.CheckValid(); err != nil {
		return nil, fmt.Errorf("malformed profile: %v", err)
	}
	return p, nil
}

// Sentinel errors used to distinguish parsing outcomes.
var errUnrecognized = fmt.Errorf("unrecognized profile format")
var errMalformed = fmt.Errorf("malformed profile format")
var errNoData = fmt.Errorf("empty input file")
var errConcatProfile = fmt.Errorf("concatenated profiles detected")
+
+func parseLegacy(data []byte) (*Profile, error) {
+ parsers := []func([]byte) (*Profile, error){
+ parseCPU,
+ parseHeap,
+ parseGoCount, // goroutine, threadcreate
+ parseThread,
+ parseContention,
+ parseJavaProfile,
+ }
+
+ for _, parser := range parsers {
+ p, err := parser(data)
+ if err == nil {
+ p.addLegacyFrameInfo()
+ return p, nil
+ }
+ if err != errUnrecognized {
+ return nil, err
+ }
+ }
+ return nil, errUnrecognized
+}
+
+// ParseUncompressed parses an uncompressed protobuf into a profile.
+func ParseUncompressed(data []byte) (*Profile, error) {
+ if len(data) == 0 {
+ return nil, errNoData
+ }
+ p := &Profile{}
+ if err := unmarshal(data, p); err != nil {
+ return nil, err
+ }
+
+ if err := p.postDecode(); err != nil {
+ return nil, err
+ }
+
+ return p, nil
+}
+
// libRx matches shared-library file names (".so" or ".so.N"/".so_N"
// suffixes).
var libRx = regexp.MustCompile(`([.]so$|[.]so[._][0-9]+)`)

// massageMappings applies heuristic-based changes to the profile
// mappings to account for quirks of some environments.
func (p *Profile) massageMappings() {
	// Merge adjacent regions with matching names, checking that the offsets match
	if len(p.Mapping) > 1 {
		mappings := []*Mapping{p.Mapping[0]}
		for _, m := range p.Mapping[1:] {
			lm := mappings[len(mappings)-1]
			if adjacent(lm, m) {
				// Extend the previous mapping over m and retarget any
				// locations that pointed at m.
				lm.Limit = m.Limit
				if m.File != "" {
					lm.File = m.File
				}
				if m.BuildID != "" {
					lm.BuildID = m.BuildID
				}
				p.updateLocationMapping(m, lm)
				continue
			}
			mappings = append(mappings, m)
		}
		p.Mapping = mappings
	}

	// Use heuristics to identify main binary and move it to the top of the list of mappings
	for i, m := range p.Mapping {
		file := strings.TrimSpace(strings.Replace(m.File, "(deleted)", "", -1))
		if len(file) == 0 {
			// Skip nameless (anonymous) mappings.
			continue
		}
		if len(libRx.FindStringSubmatch(file)) > 0 {
			// Skip shared libraries.
			continue
		}
		if file[0] == '[' {
			// Skip bracketed pseudo-files such as "[vdso]".
			continue
		}
		// Swap what we guess is main to position 0.
		p.Mapping[0], p.Mapping[i] = p.Mapping[i], p.Mapping[0]
		break
	}

	// Keep the mapping IDs neatly sorted
	for i, m := range p.Mapping {
		m.ID = uint64(i + 1)
	}
}
+
+// adjacent returns whether two mapping entries represent the same
+// mapping that has been split into two. Check that their addresses are adjacent,
+// and if the offsets match, if they are available.
+func adjacent(m1, m2 *Mapping) bool {
+ if m1.File != "" && m2.File != "" {
+ if m1.File != m2.File {
+ return false
+ }
+ }
+ if m1.BuildID != "" && m2.BuildID != "" {
+ if m1.BuildID != m2.BuildID {
+ return false
+ }
+ }
+ if m1.Limit != m2.Start {
+ return false
+ }
+ if m1.Offset != 0 && m2.Offset != 0 {
+ offset := m1.Offset + (m1.Limit - m1.Start)
+ if offset != m2.Offset {
+ return false
+ }
+ }
+ return true
+}
+
+func (p *Profile) updateLocationMapping(from, to *Mapping) {
+ for _, l := range p.Location {
+ if l.Mapping == from {
+ l.Mapping = to
+ }
+ }
+}
+
// serialize returns the marshaled protobuf bytes for p. The encode
// mutex guards the unexported fields that are modified during
// encoding (see the Profile struct comment).
func serialize(p *Profile) []byte {
	p.encodeMu.Lock()
	p.preEncode()
	b := marshal(p)
	p.encodeMu.Unlock()
	return b
}

// Write writes the profile as a gzip-compressed marshaled protobuf.
func (p *Profile) Write(w io.Writer) error {
	zw := gzip.NewWriter(w)
	defer zw.Close()
	_, err := zw.Write(serialize(p))
	return err
}

// WriteUncompressed writes the profile as a marshaled protobuf.
func (p *Profile) WriteUncompressed(w io.Writer) error {
	_, err := w.Write(serialize(p))
	return err
}
+
// CheckValid tests whether the profile is valid. Checks include, but are
// not limited to:
//   - len(Profile.Sample[n].value) == len(Profile.value_unit)
//   - Sample.id has a corresponding Profile.Location
func (p *Profile) CheckValid() error {
	// Check that sample values are consistent: every sample must carry
	// exactly one value per declared sample type.
	sampleLen := len(p.SampleType)
	if sampleLen == 0 && len(p.Sample) != 0 {
		return fmt.Errorf("missing sample type information")
	}
	for _, s := range p.Sample {
		if s == nil {
			return fmt.Errorf("profile has nil sample")
		}
		if len(s.Value) != sampleLen {
			return fmt.Errorf("mismatch: sample has %d values vs. %d types", len(s.Value), len(p.SampleType))
		}
		for _, l := range s.Location {
			if l == nil {
				return fmt.Errorf("sample has nil location")
			}
		}
	}

	// Check that all mappings/locations/functions are in the tables
	// Check that there are no duplicate ids
	// (ID 0 is reserved in the proto format, so it is rejected too.)
	mappings := make(map[uint64]*Mapping, len(p.Mapping))
	for _, m := range p.Mapping {
		if m == nil {
			return fmt.Errorf("profile has nil mapping")
		}
		if m.ID == 0 {
			return fmt.Errorf("found mapping with reserved ID=0")
		}
		if mappings[m.ID] != nil {
			return fmt.Errorf("multiple mappings with same id: %d", m.ID)
		}
		mappings[m.ID] = m
	}
	functions := make(map[uint64]*Function, len(p.Function))
	for _, f := range p.Function {
		if f == nil {
			return fmt.Errorf("profile has nil function")
		}
		if f.ID == 0 {
			return fmt.Errorf("found function with reserved ID=0")
		}
		if functions[f.ID] != nil {
			return fmt.Errorf("multiple functions with same id: %d", f.ID)
		}
		functions[f.ID] = f
	}
	locations := make(map[uint64]*Location, len(p.Location))
	for _, l := range p.Location {
		if l == nil {
			return fmt.Errorf("profile has nil location")
		}
		if l.ID == 0 {
			return fmt.Errorf("found location with reserved id=0")
		}
		if locations[l.ID] != nil {
			return fmt.Errorf("multiple locations with same id: %d", l.ID)
		}
		locations[l.ID] = l
		// Every mapping/function referenced from a location must be the
		// exact object registered in the tables above (pointer identity,
		// not just matching ID).
		if m := l.Mapping; m != nil {
			if m.ID == 0 || mappings[m.ID] != m {
				return fmt.Errorf("inconsistent mapping %p: %d", m, m.ID)
			}
		}
		for _, ln := range l.Line {
			f := ln.Function
			if f == nil {
				return fmt.Errorf("location id: %d has a line with nil function", l.ID)
			}
			if f.ID == 0 || functions[f.ID] != f {
				return fmt.Errorf("inconsistent function %p: %d", f, f.ID)
			}
		}
	}
	return nil
}
+
// Aggregate merges the locations in the profile into equivalence
// classes preserving the request attributes. It also updates the
// samples to point to the merged locations.
// Each boolean selects whether the corresponding attribute (inline
// frames, function names, file names, line numbers, addresses) is
// preserved; attributes not preserved are blanked so that entries
// differing only in those attributes become equivalent.
func (p *Profile) Aggregate(inlineFrame, function, filename, linenumber, address bool) error {
	// Narrow each mapping's advertised capabilities to the attributes
	// being kept.
	for _, m := range p.Mapping {
		m.HasInlineFrames = m.HasInlineFrames && inlineFrame
		m.HasFunctions = m.HasFunctions && function
		m.HasFilenames = m.HasFilenames && filename
		m.HasLineNumbers = m.HasLineNumbers && linenumber
	}

	// Aggregate functions
	if !function || !filename {
		for _, f := range p.Function {
			if !function {
				f.Name = ""
				f.SystemName = ""
			}
			if !filename {
				f.Filename = ""
			}
		}
	}

	// Aggregate locations
	if !inlineFrame || !address || !linenumber {
		for _, l := range p.Location {
			if !inlineFrame && len(l.Line) > 1 {
				// Keep only the outermost (caller) frame.
				l.Line = l.Line[len(l.Line)-1:]
			}
			if !linenumber {
				for i := range l.Line {
					l.Line[i].Line = 0
				}
			}
			if !address {
				l.Address = 0
			}
		}
	}

	// The rewrite above must leave the profile structurally valid.
	return p.CheckValid()
}
+
// NumLabelUnits returns a map of numeric label keys to the units
// associated with those keys and a map of those keys to any units
// that were encountered but not used.
// Unit for a given key is the first encountered unit for that key. If multiple
// units are encountered for values paired with a particular key, then the first
// unit encountered is used and all other units are returned in sorted order
// in map of ignored units.
// If no units are encountered for a particular key, the unit is then inferred
// based on the key.
func (p *Profile) NumLabelUnits() (map[string]string, map[string][]string) {
	numLabelUnits := map[string]string{}
	ignoredUnits := map[string]map[string]bool{}
	encounteredKeys := map[string]bool{}

	// Determine units based on numeric tags for each sample.
	for _, s := range p.Sample {
		for k := range s.NumLabel {
			encounteredKeys[k] = true
			for _, unit := range s.NumUnit[k] {
				if unit == "" {
					continue
				}
				// First non-empty unit wins; later conflicting units are
				// collected as ignored.
				if wantUnit, ok := numLabelUnits[k]; !ok {
					numLabelUnits[k] = unit
				} else if wantUnit != unit {
					if v, ok := ignoredUnits[k]; ok {
						v[unit] = true
					} else {
						ignoredUnits[k] = map[string]bool{unit: true}
					}
				}
			}
		}
	}
	// Infer units for keys without any units associated with
	// numeric tag values.
	for key := range encounteredKeys {
		unit := numLabelUnits[key]
		if unit == "" {
			switch key {
			case "alignment", "request":
				// These well-known keys are byte quantities.
				numLabelUnits[key] = "bytes"
			default:
				// Otherwise fall back to using the key itself as the unit.
				numLabelUnits[key] = key
			}
		}
	}

	// Copy ignored units into more readable format: a sorted slice of
	// unit names per key.
	unitsIgnored := make(map[string][]string, len(ignoredUnits))
	for key, values := range ignoredUnits {
		units := make([]string, len(values))
		i := 0
		for unit := range values {
			units[i] = unit
			i++
		}
		sort.Strings(units)
		unitsIgnored[key] = units
	}

	return numLabelUnits, unitsIgnored
}
+
// String dumps a text representation of a profile. Intended mainly
// for debugging purposes.
func (p *Profile) String() string {
	// Pre-size for the per-comment, per-sample, per-mapping and
	// per-location lines emitted below.
	ss := make([]string, 0, len(p.Comments)+len(p.Sample)+len(p.Mapping)+len(p.Location))
	for _, c := range p.Comments {
		ss = append(ss, "Comment: "+c)
	}
	if pt := p.PeriodType; pt != nil {
		ss = append(ss, fmt.Sprintf("PeriodType: %s %s", pt.Type, pt.Unit))
	}
	ss = append(ss, fmt.Sprintf("Period: %d", p.Period))
	if p.TimeNanos != 0 {
		ss = append(ss, fmt.Sprintf("Time: %v", time.Unix(0, p.TimeNanos)))
	}
	if p.DurationNanos != 0 {
		ss = append(ss, fmt.Sprintf("Duration: %.4v", time.Duration(p.DurationNanos)))
	}

	ss = append(ss, "Samples:")
	// Header line listing the sample types, marking the default type.
	var sh1 string
	for _, s := range p.SampleType {
		dflt := ""
		if s.Type == p.DefaultSampleType {
			dflt = "[dflt]"
		}
		sh1 = sh1 + fmt.Sprintf("%s/%s%s ", s.Type, s.Unit, dflt)
	}
	ss = append(ss, strings.TrimSpace(sh1))
	for _, s := range p.Sample {
		ss = append(ss, s.string())
	}

	ss = append(ss, "Locations")
	for _, l := range p.Location {
		ss = append(ss, l.string())
	}

	ss = append(ss, "Mappings")
	for _, m := range p.Mapping {
		ss = append(ss, m.string())
	}

	return strings.Join(ss, "\n") + "\n"
}
+
+// string dumps a text representation of a mapping. Intended mainly
+// for debugging purposes.
+func (m *Mapping) string() string {
+ bits := ""
+ if m.HasFunctions {
+ bits = bits + "[FN]"
+ }
+ if m.HasFilenames {
+ bits = bits + "[FL]"
+ }
+ if m.HasLineNumbers {
+ bits = bits + "[LN]"
+ }
+ if m.HasInlineFrames {
+ bits = bits + "[IN]"
+ }
+ return fmt.Sprintf("%d: %#x/%#x/%#x %s %s %s",
+ m.ID,
+ m.Start, m.Limit, m.Offset,
+ m.File,
+ m.BuildID,
+ bits)
+}
+
// string dumps a text representation of a location. Intended mainly
// for debugging purposes. Produces one line per Line entry (inlined
// frame); a location with no lines still produces one line.
func (l *Location) string() string {
	ss := []string{}
	// Prefix with id, address, mapping id and folded flag; this prefix
	// is reused (blanked) for all lines after the first.
	locStr := fmt.Sprintf("%6d: %#x ", l.ID, l.Address)
	if m := l.Mapping; m != nil {
		locStr = locStr + fmt.Sprintf("M=%d ", m.ID)
	}
	if l.IsFolded {
		locStr = locStr + "[F] "
	}
	if len(l.Line) == 0 {
		ss = append(ss, locStr)
	}
	for li := range l.Line {
		lnStr := "??"
		if fn := l.Line[li].Function; fn != nil {
			lnStr = fmt.Sprintf("%s %s:%d s=%d",
				fn.Name,
				fn.Filename,
				l.Line[li].Line,
				fn.StartLine)
			if fn.Name != fn.SystemName {
				// Append the mangled name when it differs.
				lnStr = lnStr + "(" + fn.SystemName + ")"
			}
		}
		ss = append(ss, locStr+lnStr)
		// Do not print location details past the first line
		locStr = "             "
	}
	return strings.Join(ss, "\n")
}
+
+// string dumps a text representation of a sample. Intended mainly
+// for debugging purposes.
+func (s *Sample) string() string {
+ ss := []string{}
+ var sv string
+ for _, v := range s.Value {
+ sv = fmt.Sprintf("%s %10d", sv, v)
+ }
+ sv = sv + ": "
+ for _, l := range s.Location {
+ sv = sv + fmt.Sprintf("%d ", l.ID)
+ }
+ ss = append(ss, sv)
+ const labelHeader = " "
+ if len(s.Label) > 0 {
+ ss = append(ss, labelHeader+labelsToString(s.Label))
+ }
+ if len(s.NumLabel) > 0 {
+ ss = append(ss, labelHeader+numLabelsToString(s.NumLabel, s.NumUnit))
+ }
+ return strings.Join(ss, "\n")
+}
+
// labelsToString formats string labels as a space-separated,
// alphabetically sorted list of "key:[values]" entries.
func labelsToString(labels map[string][]string) string {
	entries := make([]string, 0, len(labels))
	for k, v := range labels {
		entries = append(entries, fmt.Sprintf("%s:%v", k, v))
	}
	sort.Strings(entries)
	return strings.Join(entries, " ")
}
+
// numLabelsToString formats numeric labels as a space-separated,
// alphabetically sorted list of "key:[values]" entries. When a key has
// exactly one unit per value, each value is shown with its unit.
func numLabelsToString(numLabels map[string][]int64, numUnits map[string][]string) string {
	entries := make([]string, 0, len(numLabels))
	for k, v := range numLabels {
		units := numUnits[k]
		if len(units) != len(v) {
			// Units missing or mismatched: print the raw values.
			entries = append(entries, fmt.Sprintf("%s:%v", k, v))
			continue
		}
		withUnits := make([]string, len(v))
		for i, val := range v {
			withUnits[i] = fmt.Sprintf("%d %s", val, units[i])
		}
		entries = append(entries, fmt.Sprintf("%s:%v", k, withUnits))
	}
	sort.Strings(entries)
	return strings.Join(entries, " ")
}
+
+// SetLabel sets the specified key to the specified value for all samples in the
+// profile.
+func (p *Profile) SetLabel(key string, value []string) {
+ for _, sample := range p.Sample {
+ if sample.Label == nil {
+ sample.Label = map[string][]string{key: value}
+ } else {
+ sample.Label[key] = value
+ }
+ }
+}
+
+// RemoveLabel removes all labels associated with the specified key for all
+// samples in the profile.
+func (p *Profile) RemoveLabel(key string) {
+ for _, sample := range p.Sample {
+ delete(sample.Label, key)
+ }
+}
+
+// HasLabel returns true if a sample has a label with indicated key and value.
+func (s *Sample) HasLabel(key, value string) bool {
+ for _, v := range s.Label[key] {
+ if v == value {
+ return true
+ }
+ }
+ return false
+}
+
+// DiffBaseSample returns true if a sample belongs to the diff base and false
+// otherwise.
+func (s *Sample) DiffBaseSample() bool {
+ return s.HasLabel("pprof::base", "true")
+}
+
+// Scale multiplies all sample values in a profile by a constant.
+func (p *Profile) Scale(ratio float64) {
+ if ratio == 1 {
+ return
+ }
+ ratios := make([]float64, len(p.SampleType))
+ for i := range p.SampleType {
+ ratios[i] = ratio
+ }
+ p.ScaleN(ratios)
+}
+
+// ScaleN multiplies each sample values in a sample by a different amount.
+func (p *Profile) ScaleN(ratios []float64) error {
+ if len(p.SampleType) != len(ratios) {
+ return fmt.Errorf("mismatched scale ratios, got %d, want %d", len(ratios), len(p.SampleType))
+ }
+ allOnes := true
+ for _, r := range ratios {
+ if r != 1 {
+ allOnes = false
+ break
+ }
+ }
+ if allOnes {
+ return nil
+ }
+ for _, s := range p.Sample {
+ for i, v := range s.Value {
+ if ratios[i] != 1 {
+ s.Value[i] = int64(float64(v) * ratios[i])
+ }
+ }
+ }
+ return nil
+}
+
+// HasFunctions determines if all locations in this profile have
+// symbolized function information.
+func (p *Profile) HasFunctions() bool {
+ for _, l := range p.Location {
+ if l.Mapping != nil && !l.Mapping.HasFunctions {
+ return false
+ }
+ }
+ return true
+}
+
+// HasFileLines determines if all locations in this profile have
+// symbolized file and line number information.
+func (p *Profile) HasFileLines() bool {
+ for _, l := range p.Location {
+ if l.Mapping != nil && (!l.Mapping.HasFilenames || !l.Mapping.HasLineNumbers) {
+ return false
+ }
+ }
+ return true
+}
+
+// Unsymbolizable returns true if a mapping points to a binary for which
+// locations can't be symbolized in principle, at least now. Examples are
+// "[vdso]", [vsyscall]" and some others, see the code.
+func (m *Mapping) Unsymbolizable() bool {
+ name := filepath.Base(m.File)
+ return strings.HasPrefix(name, "[") || strings.HasPrefix(name, "linux-vdso") || strings.HasPrefix(m.File, "/dev/dri/")
+}
+
+// Copy makes a fully independent copy of a profile.
+func (p *Profile) Copy() *Profile {
+ pp := &Profile{}
+ if err := unmarshal(serialize(p), pp); err != nil {
+ panic(err)
+ }
+ if err := pp.postDecode(); err != nil {
+ panic(err)
+ }
+
+ return pp
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/profile/proto.go b/src/cmd/vendor/github.com/google/pprof/profile/proto.go
new file mode 100644
index 0000000..539ad3a
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/profile/proto.go
@@ -0,0 +1,370 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// This file is a simple protocol buffer encoder and decoder.
+// The format is described at
+// https://developers.google.com/protocol-buffers/docs/encoding
+//
+// A protocol message must implement the message interface:
+// decoder() []decoder
+// encode(*buffer)
+//
+// The decode method returns a slice indexed by field number that gives the
+// function to decode that field.
+// The encode method encodes its receiver into the given buffer.
+//
+// The two methods are simple enough to be implemented by hand rather than
+// by using a protocol compiler.
+//
+// See profile.go for examples of messages implementing this interface.
+//
+// There is no support for groups, message sets, or "has" bits.
+
+package profile
+
+import (
+ "errors"
+ "fmt"
+)
+
// buffer holds the state of an in-progress encode or decode: the
// encoded bytes plus, while decoding, the most recently read field.
type buffer struct {
	field int      // field tag
	typ   int      // proto wire type code for field
	u64   uint64   // decoded value for wire types 0 (varint), 1 and 5 (fixed-width)
	data  []byte   // encode output; while decoding, payload of a wire type 2 field
	tmp   [16]byte // scratch space used to rotate length prefixes into place
}
+
// decoder decodes the field currently held in the buffer into the
// given message.
type decoder func(*buffer, message) error

// message is implemented by each hand-written protocol buffer message
// type: decoder returns a table of field decoders indexed by field
// number, and encode appends the message's encoding to the buffer.
type message interface {
	decoder() []decoder
	encode(*buffer)
}
+
+func marshal(m message) []byte {
+ var b buffer
+ m.encode(&b)
+ return b.data
+}
+
+func encodeVarint(b *buffer, x uint64) {
+ for x >= 128 {
+ b.data = append(b.data, byte(x)|0x80)
+ x >>= 7
+ }
+ b.data = append(b.data, byte(x))
+}
+
+func encodeLength(b *buffer, tag int, len int) {
+ encodeVarint(b, uint64(tag)<<3|2)
+ encodeVarint(b, uint64(len))
+}
+
+func encodeUint64(b *buffer, tag int, x uint64) {
+ // append varint to b.data
+ encodeVarint(b, uint64(tag)<<3)
+ encodeVarint(b, x)
+}
+
// encodeUint64s appends a repeated uint64 field, using packed encoding
// when there are more than two elements.
func encodeUint64s(b *buffer, tag int, x []uint64) {
	if len(x) > 2 {
		// Use packed encoding: write the raw varints first, then the
		// field header, then rotate the header in front of the payload.
		n1 := len(b.data)
		for _, u := range x {
			encodeVarint(b, u)
		}
		n2 := len(b.data)
		encodeLength(b, tag, n2-n1)
		n3 := len(b.data)
		// Header is b.data[n2:n3] (tag + length varints, at most 16
		// bytes, so it fits b.tmp); payload is b.data[n1:n2]. Swap them.
		copy(b.tmp[:], b.data[n2:n3])
		copy(b.data[n1+(n3-n2):], b.data[n1:n2])
		copy(b.data[n1:], b.tmp[:n3-n2])
		return
	}
	for _, u := range x {
		encodeUint64(b, tag, u)
	}
}
+
+func encodeUint64Opt(b *buffer, tag int, x uint64) {
+ if x == 0 {
+ return
+ }
+ encodeUint64(b, tag, x)
+}
+
+func encodeInt64(b *buffer, tag int, x int64) {
+ u := uint64(x)
+ encodeUint64(b, tag, u)
+}
+
// encodeInt64s appends a repeated int64 field, using packed encoding
// when there are more than two elements.
func encodeInt64s(b *buffer, tag int, x []int64) {
	if len(x) > 2 {
		// Use packed encoding: write the raw varints first, then the
		// field header, then rotate the header in front of the payload.
		n1 := len(b.data)
		for _, u := range x {
			encodeVarint(b, uint64(u))
		}
		n2 := len(b.data)
		encodeLength(b, tag, n2-n1)
		n3 := len(b.data)
		// Header is b.data[n2:n3] (tag + length varints, at most 16
		// bytes, so it fits b.tmp); payload is b.data[n1:n2]. Swap them.
		copy(b.tmp[:], b.data[n2:n3])
		copy(b.data[n1+(n3-n2):], b.data[n1:n2])
		copy(b.data[n1:], b.tmp[:n3-n2])
		return
	}
	for _, u := range x {
		encodeInt64(b, tag, u)
	}
}
+
+func encodeInt64Opt(b *buffer, tag int, x int64) {
+ if x == 0 {
+ return
+ }
+ encodeInt64(b, tag, x)
+}
+
+func encodeString(b *buffer, tag int, x string) {
+ encodeLength(b, tag, len(x))
+ b.data = append(b.data, x...)
+}
+
+func encodeStrings(b *buffer, tag int, x []string) {
+ for _, s := range x {
+ encodeString(b, tag, s)
+ }
+}
+
+func encodeBool(b *buffer, tag int, x bool) {
+ if x {
+ encodeUint64(b, tag, 1)
+ } else {
+ encodeUint64(b, tag, 0)
+ }
+}
+
+func encodeBoolOpt(b *buffer, tag int, x bool) {
+ if x {
+ encodeBool(b, tag, x)
+ }
+}
+
// encodeMessage appends a length-delimited field containing the
// encoding of m.
func encodeMessage(b *buffer, tag int, m message) {
	// Encode the message body first, then the field header, then
	// rotate the header in front of the body.
	n1 := len(b.data)
	m.encode(b)
	n2 := len(b.data)
	encodeLength(b, tag, n2-n1)
	n3 := len(b.data)
	// Header is b.data[n2:n3] (tag + length varints, at most 16 bytes,
	// so it fits b.tmp); body is b.data[n1:n2]. Swap them.
	copy(b.tmp[:], b.data[n2:n3])
	copy(b.data[n1+(n3-n2):], b.data[n1:n2])
	copy(b.data[n1:], b.tmp[:n3-n2])
}
+
+func unmarshal(data []byte, m message) (err error) {
+ b := buffer{data: data, typ: 2}
+ return decodeMessage(&b, m)
+}
+
// le64 decodes an 8-byte little-endian unsigned integer from p.
func le64(p []byte) uint64 {
	var v uint64
	for i := 7; i >= 0; i-- {
		v = v<<8 | uint64(p[i])
	}
	return v
}
+
// le32 decodes a 4-byte little-endian unsigned integer from p.
func le32(p []byte) uint32 {
	var v uint32
	for i := 3; i >= 0; i-- {
		v = v<<8 | uint32(p[i])
	}
	return v
}
+
// decodeVarint reads one base-128 varint from the front of data and
// returns its value along with the remaining bytes. It fails on
// truncated input or a varint longer than 10 bytes.
func decodeVarint(data []byte) (uint64, []byte, error) {
	var x uint64
	for i := 0; i < len(data) && i < 10; i++ {
		c := data[i]
		x |= uint64(c&0x7F) << uint(7*i)
		if c&0x80 == 0 {
			// Last group: continuation bit is clear.
			return x, data[i+1:], nil
		}
	}
	return 0, nil, errors.New("bad varint")
}
+
// decodeField pulls the next field (tag, wire type, and payload) from
// data into b and returns the remaining bytes.
func decodeField(b *buffer, data []byte) ([]byte, error) {
	// The field header is a varint of (field number << 3 | wire type).
	x, data, err := decodeVarint(data)
	if err != nil {
		return nil, err
	}
	b.field = int(x >> 3)
	b.typ = int(x & 7)
	b.data = nil
	b.u64 = 0
	switch b.typ {
	case 0:
		// Varint value.
		b.u64, data, err = decodeVarint(data)
		if err != nil {
			return nil, err
		}
	case 1:
		// 64-bit fixed-width value.
		if len(data) < 8 {
			return nil, errors.New("not enough data")
		}
		b.u64 = le64(data[:8])
		data = data[8:]
	case 2:
		// Length-delimited payload, left in b.data.
		var n uint64
		n, data, err = decodeVarint(data)
		if err != nil {
			return nil, err
		}
		if n > uint64(len(data)) {
			return nil, errors.New("too much data")
		}
		b.data = data[:n]
		data = data[n:]
	case 5:
		// 32-bit fixed-width value.
		if len(data) < 4 {
			return nil, errors.New("not enough data")
		}
		b.u64 = uint64(le32(data[:4]))
		data = data[4:]
	default:
		// Groups (types 3 and 4) and anything else are unsupported.
		return nil, fmt.Errorf("unknown wire type: %d", b.typ)
	}

	return data, nil
}
+
+func checkType(b *buffer, typ int) error {
+ if b.typ != typ {
+ return errors.New("type mismatch")
+ }
+ return nil
+}
+
// decodeMessage decodes the length-delimited payload currently in b
// into m, dispatching each field to the decoder table returned by
// m.decoder().
func decodeMessage(b *buffer, m message) error {
	if err := checkType(b, 2); err != nil {
		return err
	}
	dec := m.decoder()
	data := b.data
	for len(data) > 0 {
		// pull varint field# + type
		var err error
		data, err = decodeField(b, data)
		if err != nil {
			return err
		}
		// Silently skip fields the message has no decoder for.
		if b.field >= len(dec) || dec[b.field] == nil {
			continue
		}
		if err := dec[b.field](b, m); err != nil {
			return err
		}
	}
	return nil
}
+
+func decodeInt64(b *buffer, x *int64) error {
+ if err := checkType(b, 0); err != nil {
+ return err
+ }
+ *x = int64(b.u64)
+ return nil
+}
+
+func decodeInt64s(b *buffer, x *[]int64) error {
+ if b.typ == 2 {
+ // Packed encoding
+ data := b.data
+ tmp := make([]int64, 0, len(data)) // Maximally sized
+ for len(data) > 0 {
+ var u uint64
+ var err error
+
+ if u, data, err = decodeVarint(data); err != nil {
+ return err
+ }
+ tmp = append(tmp, int64(u))
+ }
+ *x = append(*x, tmp...)
+ return nil
+ }
+ var i int64
+ if err := decodeInt64(b, &i); err != nil {
+ return err
+ }
+ *x = append(*x, i)
+ return nil
+}
+
+func decodeUint64(b *buffer, x *uint64) error {
+ if err := checkType(b, 0); err != nil {
+ return err
+ }
+ *x = b.u64
+ return nil
+}
+
+func decodeUint64s(b *buffer, x *[]uint64) error {
+ if b.typ == 2 {
+ data := b.data
+ // Packed encoding
+ tmp := make([]uint64, 0, len(data)) // Maximally sized
+ for len(data) > 0 {
+ var u uint64
+ var err error
+
+ if u, data, err = decodeVarint(data); err != nil {
+ return err
+ }
+ tmp = append(tmp, u)
+ }
+ *x = append(*x, tmp...)
+ return nil
+ }
+ var u uint64
+ if err := decodeUint64(b, &u); err != nil {
+ return err
+ }
+ *x = append(*x, u)
+ return nil
+}
+
+func decodeString(b *buffer, x *string) error {
+ if err := checkType(b, 2); err != nil {
+ return err
+ }
+ *x = string(b.data)
+ return nil
+}
+
+func decodeStrings(b *buffer, x *[]string) error {
+ var s string
+ if err := decodeString(b, &s); err != nil {
+ return err
+ }
+ *x = append(*x, s)
+ return nil
+}
+
+func decodeBool(b *buffer, x *bool) error {
+ if err := checkType(b, 0); err != nil {
+ return err
+ }
+ if int64(b.u64) == 0 {
+ *x = false
+ } else {
+ *x = true
+ }
+ return nil
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/profile/prune.go b/src/cmd/vendor/github.com/google/pprof/profile/prune.go
new file mode 100644
index 0000000..02d21a8
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/profile/prune.go
@@ -0,0 +1,178 @@
+// Copyright 2014 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Implements methods to remove frames from profiles.
+
+package profile
+
+import (
+ "fmt"
+ "regexp"
+ "strings"
+)
+
var (
	// reservedNames lists substrings that contain '(' but must not be
	// treated as the start of an argument list when simplifying
	// function names.
	reservedNames = []string{"(anonymous namespace)", "operator()"}
	// bracketRx matches either a reserved name or a bare '(' — the
	// candidate positions where an argument list might begin.
	bracketRx = func() *regexp.Regexp {
		var quotedNames []string
		for _, name := range append(reservedNames, "(") {
			quotedNames = append(quotedNames, regexp.QuoteMeta(name))
		}
		return regexp.MustCompile(strings.Join(quotedNames, "|"))
	}()
)
+
+// simplifyFunc does some primitive simplification of function names.
+func simplifyFunc(f string) string {
+ // Account for leading '.' on the PPC ELF v1 ABI.
+ funcName := strings.TrimPrefix(f, ".")
+ // Account for unsimplified names -- try to remove the argument list by trimming
+ // starting from the first '(', but skipping reserved names that have '('.
+ for _, ind := range bracketRx.FindAllStringSubmatchIndex(funcName, -1) {
+ foundReserved := false
+ for _, res := range reservedNames {
+ if funcName[ind[0]:ind[1]] == res {
+ foundReserved = true
+ break
+ }
+ }
+ if !foundReserved {
+ funcName = funcName[:ind[0]]
+ break
+ }
+ }
+ return funcName
+}
+
// Prune removes all nodes beneath a node matching dropRx, and not
// matching keepRx. If the root node of a Sample matches, the sample
// will have an empty stack.
func (p *Profile) Prune(dropRx, keepRx *regexp.Regexp) {
	// prune marks locations to be removed entirely; pruneBeneath marks
	// locations whose callees (entries closer to the leaf) are removed.
	prune := make(map[uint64]bool)
	pruneBeneath := make(map[uint64]bool)

	for _, loc := range p.Location {
		var i int
		// Scan inline frames from the outermost (caller) inward,
		// looking for a frame that matches dropRx but not keepRx.
		for i = len(loc.Line) - 1; i >= 0; i-- {
			if fn := loc.Line[i].Function; fn != nil && fn.Name != "" {
				funcName := simplifyFunc(fn.Name)
				if dropRx.MatchString(funcName) {
					if keepRx == nil || !keepRx.MatchString(funcName) {
						break
					}
				}
			}
		}

		if i >= 0 {
			// Found matching entry to prune.
			pruneBeneath[loc.ID] = true

			// Remove the matching location.
			if i == len(loc.Line)-1 {
				// Matched the top entry: prune the whole location.
				prune[loc.ID] = true
			} else {
				// Keep only the frames above the match.
				loc.Line = loc.Line[i+1:]
			}
		}
	}

	// Prune locs from each Sample
	for _, sample := range p.Sample {
		// Scan from the root to the leaves to find the prune location.
		// Do not prune frames before the first user frame, to avoid
		// pruning everything.
		foundUser := false
		for i := len(sample.Location) - 1; i >= 0; i-- {
			id := sample.Location[i].ID
			if !prune[id] && !pruneBeneath[id] {
				foundUser = true
				continue
			}
			if !foundUser {
				continue
			}
			if prune[id] {
				// Drop this location and everything below it.
				sample.Location = sample.Location[i+1:]
				break
			}
			if pruneBeneath[id] {
				// Keep this location but drop everything below it.
				sample.Location = sample.Location[i:]
				break
			}
		}
	}
}
+
+// RemoveUninteresting prunes and elides profiles using built-in
+// tables of uninteresting function names.
+func (p *Profile) RemoveUninteresting() error {
+ var keep, drop *regexp.Regexp
+ var err error
+
+ if p.DropFrames != "" {
+ if drop, err = regexp.Compile("^(" + p.DropFrames + ")$"); err != nil {
+ return fmt.Errorf("failed to compile regexp %s: %v", p.DropFrames, err)
+ }
+ if p.KeepFrames != "" {
+ if keep, err = regexp.Compile("^(" + p.KeepFrames + ")$"); err != nil {
+ return fmt.Errorf("failed to compile regexp %s: %v", p.KeepFrames, err)
+ }
+ }
+ p.Prune(drop, keep)
+ }
+ return nil
+}
+
+// PruneFrom removes all nodes beneath the lowest node matching dropRx, not including itself.
+//
+// Please see the example below to understand this method as well as
+// the difference from Prune method.
+//
+// A sample contains Location of [A,B,C,B,D] where D is the top frame and there's no inline.
+//
+// PruneFrom(A) returns [A,B,C,B,D] because there's no node beneath A.
+// Prune(A, nil) returns [B,C,B,D] by removing A itself.
+//
+// PruneFrom(B) returns [B,C,B,D] by removing all nodes beneath the first B when scanning from the bottom.
+// Prune(B, nil) returns [D] because a matching node is found by scanning from the root.
+func (p *Profile) PruneFrom(dropRx *regexp.Regexp) {
+ pruneBeneath := make(map[uint64]bool)
+
+ for _, loc := range p.Location {
+ for i := 0; i < len(loc.Line); i++ {
+ if fn := loc.Line[i].Function; fn != nil && fn.Name != "" {
+ funcName := simplifyFunc(fn.Name)
+ if dropRx.MatchString(funcName) {
+ // Found matching entry to prune.
+ pruneBeneath[loc.ID] = true
+ loc.Line = loc.Line[i:]
+ break
+ }
+ }
+ }
+ }
+
+ // Prune locs from each Sample
+ for _, sample := range p.Sample {
+ // Scan from the bottom leaf to the root to find the prune location.
+ for i, loc := range sample.Location {
+ if pruneBeneath[loc.ID] {
+ sample.Location = sample.Location[i:]
+ break
+ }
+ }
+ }
+}
diff --git a/src/cmd/vendor/github.com/google/pprof/third_party/d3/LICENSE b/src/cmd/vendor/github.com/google/pprof/third_party/d3/LICENSE
new file mode 100644
index 0000000..1d9d875
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/third_party/d3/LICENSE
@@ -0,0 +1,27 @@
+Copyright 2010-2017 Mike Bostock
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+
+* Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+
+* Neither the name of the author nor the names of contributors may be used to
+ endorse or promote products derived from this software without specific prior
+ written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/src/cmd/vendor/github.com/google/pprof/third_party/d3/README.md b/src/cmd/vendor/github.com/google/pprof/third_party/d3/README.md
new file mode 100644
index 0000000..53e6eb6
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/third_party/d3/README.md
@@ -0,0 +1,119 @@
+# Building a customized D3.js bundle
+
+The D3.js version distributed with pprof is customized to only include the modules required by pprof.
+
+## Dependencies
+
+First, it's necessary to pull all bundle dependencies. We will use a JavaScript package manager, [npm](https://www.npmjs.com/), to accomplish that. npm dependencies are declared in a `package.json` file, so create one with the following configuration:
+
+```js
+{
+ "name": "d3-pprof",
+ "version": "1.0.0",
+ "description": "A d3.js bundle for pprof.",
+ "scripts": {
+ "prepare": "rollup -c && uglifyjs d3.js -c -m -o d3.min.js"
+ },
+ "license": "Apache-2.0",
+ "devDependencies": {
+ "d3-selection": "1.1.0",
+ "d3-hierarchy": "1.1.5",
+ "d3-scale": "1.0.6",
+ "d3-format": "1.2.0",
+ "d3-ease": "1.0.3",
+ "d3-array": "1.2.1",
+ "d3-collection": "1.0.4",
+ "d3-transition": "1.1.0",
+ "rollup": "0.51.8",
+ "rollup-plugin-node-resolve": "3",
+ "uglify-js": "3.1.10"
+ }
+}
+```
+
+Besides the bundle dependencies, the `package.json` file also specifies a script called `prepare`, which will be executed to create the bundle after `Rollup` is installed.
+
+## Bundler
+
+The simplest way of creating a custom bundle is to use a bundler, such as [Rollup](https://rollupjs.org/) or [Webpack](https://webpack.js.org/). Rollup will be used in this example.
+
+First, create a `rollup.config.js` file, containing the configuration Rollup should use to build the bundle.
+
+```js
+import node from "rollup-plugin-node-resolve";
+
+export default {
+ input: "index.js",
+ output: {
+ format: "umd",
+ file: "d3.js"
+ },
+ name: "d3",
+ plugins: [node()],
+ sourcemap: false
+};
+```
+
+Then create an `index.js` file containing all the functions that need to be exported in the bundle.
+
+```js
+export {
+ select,
+ selection,
+ event,
+} from "d3-selection";
+
+export {
+ hierarchy,
+ partition,
+} from "d3-hierarchy";
+
+export {
+ scaleLinear,
+} from "d3-scale";
+
+export {
+ format,
+} from "d3-format";
+
+export {
+ easeCubic,
+} from "d3-ease";
+
+export {
+ ascending,
+} from "d3-array";
+
+export {
+ map,
+} from "d3-collection";
+
+export {
+ transition,
+} from "d3-transition";
+```
+
+## Building
+
+Once all of the files have been created, execute the following commands to pull all dependencies and build the bundle.
+
+```
+% npm install
+% npm run prepare
+```
+
+This will create two files, `d3.js` and `d3.min.js`, the custom D3.js bundle and its minified version respectively.
+
+# References
+
+## D3 Custom Bundle
+
+A demonstration of building a custom D3 4.0 bundle using ES2015 modules and Rollup.
+
+[bl.ocks.org/mbostock/bb09af4c39c79cffcde4](https://bl.ocks.org/mbostock/bb09af4c39c79cffcde4)
+
+## d3-pprof
+
+A repository containing all previously mentioned configuration files and the generated custom bundle.
+
+[github.com/spiermar/d3-pprof](https://github.com/spiermar/d3-pprof) \ No newline at end of file
diff --git a/src/cmd/vendor/github.com/google/pprof/third_party/d3/d3.go b/src/cmd/vendor/github.com/google/pprof/third_party/d3/d3.go
new file mode 100644
index 0000000..7d217c7
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/third_party/d3/d3.go
@@ -0,0 +1,4675 @@
+// D3.js is a JavaScript library for manipulating documents based on data.
+// https://github.com/d3/d3
+// See LICENSE file for license details
+// Custom build for pprof (https://github.com/spiermar/d3-pprof)
+
+package d3
+
+// JSSource returns the d3.js file
+const JSSource = `
+(function (global, factory) {
+ typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
+ typeof define === 'function' && define.amd ? define(['exports'], factory) :
+ (factory((global.d3 = {})));
+}(this, (function (exports) { 'use strict';
+
+var xhtml = "http://www.w3.org/1999/xhtml";
+
+var namespaces = {
+ svg: "http://www.w3.org/2000/svg",
+ xhtml: xhtml,
+ xlink: "http://www.w3.org/1999/xlink",
+ xml: "http://www.w3.org/XML/1998/namespace",
+ xmlns: "http://www.w3.org/2000/xmlns/"
+};
+
+var namespace = function(name) {
+ var prefix = name += "", i = prefix.indexOf(":");
+ if (i >= 0 && (prefix = name.slice(0, i)) !== "xmlns") name = name.slice(i + 1);
+ return namespaces.hasOwnProperty(prefix) ? {space: namespaces[prefix], local: name} : name;
+};
+
+function creatorInherit(name) {
+ return function() {
+ var document = this.ownerDocument,
+ uri = this.namespaceURI;
+ return uri === xhtml && document.documentElement.namespaceURI === xhtml
+ ? document.createElement(name)
+ : document.createElementNS(uri, name);
+ };
+}
+
+function creatorFixed(fullname) {
+ return function() {
+ return this.ownerDocument.createElementNS(fullname.space, fullname.local);
+ };
+}
+
+var creator = function(name) {
+ var fullname = namespace(name);
+ return (fullname.local
+ ? creatorFixed
+ : creatorInherit)(fullname);
+};
+
+var matcher = function(selector) {
+ return function() {
+ return this.matches(selector);
+ };
+};
+
+if (typeof document !== "undefined") {
+ var element = document.documentElement;
+ if (!element.matches) {
+ var vendorMatches = element.webkitMatchesSelector
+ || element.msMatchesSelector
+ || element.mozMatchesSelector
+ || element.oMatchesSelector;
+ matcher = function(selector) {
+ return function() {
+ return vendorMatches.call(this, selector);
+ };
+ };
+ }
+}
+
+var matcher$1 = matcher;
+
+var filterEvents = {};
+
+exports.event = null;
+
+if (typeof document !== "undefined") {
+ var element$1 = document.documentElement;
+ if (!("onmouseenter" in element$1)) {
+ filterEvents = {mouseenter: "mouseover", mouseleave: "mouseout"};
+ }
+}
+
+function filterContextListener(listener, index, group) {
+ listener = contextListener(listener, index, group);
+ return function(event) {
+ var related = event.relatedTarget;
+ if (!related || (related !== this && !(related.compareDocumentPosition(this) & 8))) {
+ listener.call(this, event);
+ }
+ };
+}
+
+function contextListener(listener, index, group) {
+ return function(event1) {
+ var event0 = exports.event; // Events can be reentrant (e.g., focus).
+ exports.event = event1;
+ try {
+ listener.call(this, this.__data__, index, group);
+ } finally {
+ exports.event = event0;
+ }
+ };
+}
+
+function parseTypenames(typenames) {
+ return typenames.trim().split(/^|\s+/).map(function(t) {
+ var name = "", i = t.indexOf(".");
+ if (i >= 0) name = t.slice(i + 1), t = t.slice(0, i);
+ return {type: t, name: name};
+ });
+}
+
+function onRemove(typename) {
+ return function() {
+ var on = this.__on;
+ if (!on) return;
+ for (var j = 0, i = -1, m = on.length, o; j < m; ++j) {
+ if (o = on[j], (!typename.type || o.type === typename.type) && o.name === typename.name) {
+ this.removeEventListener(o.type, o.listener, o.capture);
+ } else {
+ on[++i] = o;
+ }
+ }
+ if (++i) on.length = i;
+ else delete this.__on;
+ };
+}
+
+function onAdd(typename, value, capture) {
+ var wrap = filterEvents.hasOwnProperty(typename.type) ? filterContextListener : contextListener;
+ return function(d, i, group) {
+ var on = this.__on, o, listener = wrap(value, i, group);
+ if (on) for (var j = 0, m = on.length; j < m; ++j) {
+ if ((o = on[j]).type === typename.type && o.name === typename.name) {
+ this.removeEventListener(o.type, o.listener, o.capture);
+ this.addEventListener(o.type, o.listener = listener, o.capture = capture);
+ o.value = value;
+ return;
+ }
+ }
+ this.addEventListener(typename.type, listener, capture);
+ o = {type: typename.type, name: typename.name, value: value, listener: listener, capture: capture};
+ if (!on) this.__on = [o];
+ else on.push(o);
+ };
+}
+
+var selection_on = function(typename, value, capture) {
+ var typenames = parseTypenames(typename + ""), i, n = typenames.length, t;
+
+ if (arguments.length < 2) {
+ var on = this.node().__on;
+ if (on) for (var j = 0, m = on.length, o; j < m; ++j) {
+ for (i = 0, o = on[j]; i < n; ++i) {
+ if ((t = typenames[i]).type === o.type && t.name === o.name) {
+ return o.value;
+ }
+ }
+ }
+ return;
+ }
+
+ on = value ? onAdd : onRemove;
+ if (capture == null) capture = false;
+ for (i = 0; i < n; ++i) this.each(on(typenames[i], value, capture));
+ return this;
+};
+
+function none() {}
+
+var selector = function(selector) {
+ return selector == null ? none : function() {
+ return this.querySelector(selector);
+ };
+};
+
+var selection_select = function(select) {
+ if (typeof select !== "function") select = selector(select);
+
+ for (var groups = this._groups, m = groups.length, subgroups = new Array(m), j = 0; j < m; ++j) {
+ for (var group = groups[j], n = group.length, subgroup = subgroups[j] = new Array(n), node, subnode, i = 0; i < n; ++i) {
+ if ((node = group[i]) && (subnode = select.call(node, node.__data__, i, group))) {
+ if ("__data__" in node) subnode.__data__ = node.__data__;
+ subgroup[i] = subnode;
+ }
+ }
+ }
+
+ return new Selection(subgroups, this._parents);
+};
+
+function empty() {
+ return [];
+}
+
+var selectorAll = function(selector) {
+ return selector == null ? empty : function() {
+ return this.querySelectorAll(selector);
+ };
+};
+
+var selection_selectAll = function(select) {
+ if (typeof select !== "function") select = selectorAll(select);
+
+ for (var groups = this._groups, m = groups.length, subgroups = [], parents = [], j = 0; j < m; ++j) {
+ for (var group = groups[j], n = group.length, node, i = 0; i < n; ++i) {
+ if (node = group[i]) {
+ subgroups.push(select.call(node, node.__data__, i, group));
+ parents.push(node);
+ }
+ }
+ }
+
+ return new Selection(subgroups, parents);
+};
+
+var selection_filter = function(match) {
+ if (typeof match !== "function") match = matcher$1(match);
+
+ for (var groups = this._groups, m = groups.length, subgroups = new Array(m), j = 0; j < m; ++j) {
+ for (var group = groups[j], n = group.length, subgroup = subgroups[j] = [], node, i = 0; i < n; ++i) {
+ if ((node = group[i]) && match.call(node, node.__data__, i, group)) {
+ subgroup.push(node);
+ }
+ }
+ }
+
+ return new Selection(subgroups, this._parents);
+};
+
+var sparse = function(update) {
+ return new Array(update.length);
+};
+
+var selection_enter = function() {
+ return new Selection(this._enter || this._groups.map(sparse), this._parents);
+};
+
+function EnterNode(parent, datum) {
+ this.ownerDocument = parent.ownerDocument;
+ this.namespaceURI = parent.namespaceURI;
+ this._next = null;
+ this._parent = parent;
+ this.__data__ = datum;
+}
+
+EnterNode.prototype = {
+ constructor: EnterNode,
+ appendChild: function(child) { return this._parent.insertBefore(child, this._next); },
+ insertBefore: function(child, next) { return this._parent.insertBefore(child, next); },
+ querySelector: function(selector) { return this._parent.querySelector(selector); },
+ querySelectorAll: function(selector) { return this._parent.querySelectorAll(selector); }
+};
+
+var constant = function(x) {
+ return function() {
+ return x;
+ };
+};
+
+var keyPrefix = "$"; // Protect against keys like “__proto__”.
+
+function bindIndex(parent, group, enter, update, exit, data) {
+ var i = 0,
+ node,
+ groupLength = group.length,
+ dataLength = data.length;
+
+ // Put any non-null nodes that fit into update.
+ // Put any null nodes into enter.
+ // Put any remaining data into enter.
+ for (; i < dataLength; ++i) {
+ if (node = group[i]) {
+ node.__data__ = data[i];
+ update[i] = node;
+ } else {
+ enter[i] = new EnterNode(parent, data[i]);
+ }
+ }
+
+ // Put any non-null nodes that don’t fit into exit.
+ for (; i < groupLength; ++i) {
+ if (node = group[i]) {
+ exit[i] = node;
+ }
+ }
+}
+
+function bindKey(parent, group, enter, update, exit, data, key) {
+ var i,
+ node,
+ nodeByKeyValue = {},
+ groupLength = group.length,
+ dataLength = data.length,
+ keyValues = new Array(groupLength),
+ keyValue;
+
+ // Compute the key for each node.
+ // If multiple nodes have the same key, the duplicates are added to exit.
+ for (i = 0; i < groupLength; ++i) {
+ if (node = group[i]) {
+ keyValues[i] = keyValue = keyPrefix + key.call(node, node.__data__, i, group);
+ if (keyValue in nodeByKeyValue) {
+ exit[i] = node;
+ } else {
+ nodeByKeyValue[keyValue] = node;
+ }
+ }
+ }
+
+ // Compute the key for each datum.
+ // If there a node associated with this key, join and add it to update.
+ // If there is not (or the key is a duplicate), add it to enter.
+ for (i = 0; i < dataLength; ++i) {
+ keyValue = keyPrefix + key.call(parent, data[i], i, data);
+ if (node = nodeByKeyValue[keyValue]) {
+ update[i] = node;
+ node.__data__ = data[i];
+ nodeByKeyValue[keyValue] = null;
+ } else {
+ enter[i] = new EnterNode(parent, data[i]);
+ }
+ }
+
+ // Add any remaining nodes that were not bound to data to exit.
+ for (i = 0; i < groupLength; ++i) {
+ if ((node = group[i]) && (nodeByKeyValue[keyValues[i]] === node)) {
+ exit[i] = node;
+ }
+ }
+}
+
+var selection_data = function(value, key) {
+ if (!value) {
+ data = new Array(this.size()), j = -1;
+ this.each(function(d) { data[++j] = d; });
+ return data;
+ }
+
+ var bind = key ? bindKey : bindIndex,
+ parents = this._parents,
+ groups = this._groups;
+
+ if (typeof value !== "function") value = constant(value);
+
+ for (var m = groups.length, update = new Array(m), enter = new Array(m), exit = new Array(m), j = 0; j < m; ++j) {
+ var parent = parents[j],
+ group = groups[j],
+ groupLength = group.length,
+ data = value.call(parent, parent && parent.__data__, j, parents),
+ dataLength = data.length,
+ enterGroup = enter[j] = new Array(dataLength),
+ updateGroup = update[j] = new Array(dataLength),
+ exitGroup = exit[j] = new Array(groupLength);
+
+ bind(parent, group, enterGroup, updateGroup, exitGroup, data, key);
+
+ // Now connect the enter nodes to their following update node, such that
+ // appendChild can insert the materialized enter node before this node,
+ // rather than at the end of the parent node.
+ for (var i0 = 0, i1 = 0, previous, next; i0 < dataLength; ++i0) {
+ if (previous = enterGroup[i0]) {
+ if (i0 >= i1) i1 = i0 + 1;
+ while (!(next = updateGroup[i1]) && ++i1 < dataLength);
+ previous._next = next || null;
+ }
+ }
+ }
+
+ update = new Selection(update, parents);
+ update._enter = enter;
+ update._exit = exit;
+ return update;
+};
+
+var selection_exit = function() {
+ return new Selection(this._exit || this._groups.map(sparse), this._parents);
+};
+
+var selection_merge = function(selection$$1) {
+
+ for (var groups0 = this._groups, groups1 = selection$$1._groups, m0 = groups0.length, m1 = groups1.length, m = Math.min(m0, m1), merges = new Array(m0), j = 0; j < m; ++j) {
+ for (var group0 = groups0[j], group1 = groups1[j], n = group0.length, merge = merges[j] = new Array(n), node, i = 0; i < n; ++i) {
+ if (node = group0[i] || group1[i]) {
+ merge[i] = node;
+ }
+ }
+ }
+
+ for (; j < m0; ++j) {
+ merges[j] = groups0[j];
+ }
+
+ return new Selection(merges, this._parents);
+};
+
+var selection_order = function() {
+
+ for (var groups = this._groups, j = -1, m = groups.length; ++j < m;) {
+ for (var group = groups[j], i = group.length - 1, next = group[i], node; --i >= 0;) {
+ if (node = group[i]) {
+ if (next && next !== node.nextSibling) next.parentNode.insertBefore(node, next);
+ next = node;
+ }
+ }
+ }
+
+ return this;
+};
+
+var selection_sort = function(compare) {
+ if (!compare) compare = ascending;
+
+ function compareNode(a, b) {
+ return a && b ? compare(a.__data__, b.__data__) : !a - !b;
+ }
+
+ for (var groups = this._groups, m = groups.length, sortgroups = new Array(m), j = 0; j < m; ++j) {
+ for (var group = groups[j], n = group.length, sortgroup = sortgroups[j] = new Array(n), node, i = 0; i < n; ++i) {
+ if (node = group[i]) {
+ sortgroup[i] = node;
+ }
+ }
+ sortgroup.sort(compareNode);
+ }
+
+ return new Selection(sortgroups, this._parents).order();
+};
+
+function ascending(a, b) {
+ return a < b ? -1 : a > b ? 1 : a >= b ? 0 : NaN;
+}
+
+var selection_call = function() {
+ var callback = arguments[0];
+ arguments[0] = this;
+ callback.apply(null, arguments);
+ return this;
+};
+
+var selection_nodes = function() {
+ var nodes = new Array(this.size()), i = -1;
+ this.each(function() { nodes[++i] = this; });
+ return nodes;
+};
+
+var selection_node = function() {
+
+ for (var groups = this._groups, j = 0, m = groups.length; j < m; ++j) {
+ for (var group = groups[j], i = 0, n = group.length; i < n; ++i) {
+ var node = group[i];
+ if (node) return node;
+ }
+ }
+
+ return null;
+};
+
+var selection_size = function() {
+ var size = 0;
+ this.each(function() { ++size; });
+ return size;
+};
+
+var selection_empty = function() {
+ return !this.node();
+};
+
+var selection_each = function(callback) {
+
+ for (var groups = this._groups, j = 0, m = groups.length; j < m; ++j) {
+ for (var group = groups[j], i = 0, n = group.length, node; i < n; ++i) {
+ if (node = group[i]) callback.call(node, node.__data__, i, group);
+ }
+ }
+
+ return this;
+};
+
+function attrRemove(name) {
+ return function() {
+ this.removeAttribute(name);
+ };
+}
+
+function attrRemoveNS(fullname) {
+ return function() {
+ this.removeAttributeNS(fullname.space, fullname.local);
+ };
+}
+
+function attrConstant(name, value) {
+ return function() {
+ this.setAttribute(name, value);
+ };
+}
+
+function attrConstantNS(fullname, value) {
+ return function() {
+ this.setAttributeNS(fullname.space, fullname.local, value);
+ };
+}
+
+function attrFunction(name, value) {
+ return function() {
+ var v = value.apply(this, arguments);
+ if (v == null) this.removeAttribute(name);
+ else this.setAttribute(name, v);
+ };
+}
+
+function attrFunctionNS(fullname, value) {
+ return function() {
+ var v = value.apply(this, arguments);
+ if (v == null) this.removeAttributeNS(fullname.space, fullname.local);
+ else this.setAttributeNS(fullname.space, fullname.local, v);
+ };
+}
+
+var selection_attr = function(name, value) {
+ var fullname = namespace(name);
+
+ if (arguments.length < 2) {
+ var node = this.node();
+ return fullname.local
+ ? node.getAttributeNS(fullname.space, fullname.local)
+ : node.getAttribute(fullname);
+ }
+
+ return this.each((value == null
+ ? (fullname.local ? attrRemoveNS : attrRemove) : (typeof value === "function"
+ ? (fullname.local ? attrFunctionNS : attrFunction)
+ : (fullname.local ? attrConstantNS : attrConstant)))(fullname, value));
+};
+
+var defaultView = function(node) {
+ return (node.ownerDocument && node.ownerDocument.defaultView) // node is a Node
+ || (node.document && node) // node is a Window
+ || node.defaultView; // node is a Document
+};
+
+function styleRemove(name) {
+ return function() {
+ this.style.removeProperty(name);
+ };
+}
+
+function styleConstant(name, value, priority) {
+ return function() {
+ this.style.setProperty(name, value, priority);
+ };
+}
+
+function styleFunction(name, value, priority) {
+ return function() {
+ var v = value.apply(this, arguments);
+ if (v == null) this.style.removeProperty(name);
+ else this.style.setProperty(name, v, priority);
+ };
+}
+
+var selection_style = function(name, value, priority) {
+ return arguments.length > 1
+ ? this.each((value == null
+ ? styleRemove : typeof value === "function"
+ ? styleFunction
+ : styleConstant)(name, value, priority == null ? "" : priority))
+ : styleValue(this.node(), name);
+};
+
+function styleValue(node, name) {
+ return node.style.getPropertyValue(name)
+ || defaultView(node).getComputedStyle(node, null).getPropertyValue(name);
+}
+
+function propertyRemove(name) {
+ return function() {
+ delete this[name];
+ };
+}
+
+function propertyConstant(name, value) {
+ return function() {
+ this[name] = value;
+ };
+}
+
+function propertyFunction(name, value) {
+ return function() {
+ var v = value.apply(this, arguments);
+ if (v == null) delete this[name];
+ else this[name] = v;
+ };
+}
+
+var selection_property = function(name, value) {
+ return arguments.length > 1
+ ? this.each((value == null
+ ? propertyRemove : typeof value === "function"
+ ? propertyFunction
+ : propertyConstant)(name, value))
+ : this.node()[name];
+};
+
+function classArray(string) {
+ return string.trim().split(/^|\s+/);
+}
+
+function classList(node) {
+ return node.classList || new ClassList(node);
+}
+
+function ClassList(node) {
+ this._node = node;
+ this._names = classArray(node.getAttribute("class") || "");
+}
+
+ClassList.prototype = {
+ add: function(name) {
+ var i = this._names.indexOf(name);
+ if (i < 0) {
+ this._names.push(name);
+ this._node.setAttribute("class", this._names.join(" "));
+ }
+ },
+ remove: function(name) {
+ var i = this._names.indexOf(name);
+ if (i >= 0) {
+ this._names.splice(i, 1);
+ this._node.setAttribute("class", this._names.join(" "));
+ }
+ },
+ contains: function(name) {
+ return this._names.indexOf(name) >= 0;
+ }
+};
+
+function classedAdd(node, names) {
+ var list = classList(node), i = -1, n = names.length;
+ while (++i < n) list.add(names[i]);
+}
+
+function classedRemove(node, names) {
+ var list = classList(node), i = -1, n = names.length;
+ while (++i < n) list.remove(names[i]);
+}
+
+function classedTrue(names) {
+ return function() {
+ classedAdd(this, names);
+ };
+}
+
+function classedFalse(names) {
+ return function() {
+ classedRemove(this, names);
+ };
+}
+
+function classedFunction(names, value) {
+ return function() {
+ (value.apply(this, arguments) ? classedAdd : classedRemove)(this, names);
+ };
+}
+
+var selection_classed = function(name, value) {
+ var names = classArray(name + "");
+
+ if (arguments.length < 2) {
+ var list = classList(this.node()), i = -1, n = names.length;
+ while (++i < n) if (!list.contains(names[i])) return false;
+ return true;
+ }
+
+ return this.each((typeof value === "function"
+ ? classedFunction : value
+ ? classedTrue
+ : classedFalse)(names, value));
+};
+
+function textRemove() {
+ this.textContent = "";
+}
+
+function textConstant(value) {
+ return function() {
+ this.textContent = value;
+ };
+}
+
+function textFunction(value) {
+ return function() {
+ var v = value.apply(this, arguments);
+ this.textContent = v == null ? "" : v;
+ };
+}
+
+var selection_text = function(value) {
+ return arguments.length
+ ? this.each(value == null
+ ? textRemove : (typeof value === "function"
+ ? textFunction
+ : textConstant)(value))
+ : this.node().textContent;
+};
+
+function htmlRemove() {
+ this.innerHTML = "";
+}
+
+function htmlConstant(value) {
+ return function() {
+ this.innerHTML = value;
+ };
+}
+
+function htmlFunction(value) {
+ return function() {
+ var v = value.apply(this, arguments);
+ this.innerHTML = v == null ? "" : v;
+ };
+}
+
+var selection_html = function(value) {
+ return arguments.length
+ ? this.each(value == null
+ ? htmlRemove : (typeof value === "function"
+ ? htmlFunction
+ : htmlConstant)(value))
+ : this.node().innerHTML;
+};
+
+function raise() {
+ if (this.nextSibling) this.parentNode.appendChild(this);
+}
+
+var selection_raise = function() {
+ return this.each(raise);
+};
+
+function lower() {
+ if (this.previousSibling) this.parentNode.insertBefore(this, this.parentNode.firstChild);
+}
+
+var selection_lower = function() {
+ return this.each(lower);
+};
+
+var selection_append = function(name) {
+ var create = typeof name === "function" ? name : creator(name);
+ return this.select(function() {
+ return this.appendChild(create.apply(this, arguments));
+ });
+};
+
+function constantNull() {
+ return null;
+}
+
+var selection_insert = function(name, before) {
+ var create = typeof name === "function" ? name : creator(name),
+ select = before == null ? constantNull : typeof before === "function" ? before : selector(before);
+ return this.select(function() {
+ return this.insertBefore(create.apply(this, arguments), select.apply(this, arguments) || null);
+ });
+};
+
+function remove() {
+ var parent = this.parentNode;
+ if (parent) parent.removeChild(this);
+}
+
+var selection_remove = function() {
+ return this.each(remove);
+};
+
+var selection_datum = function(value) {
+ return arguments.length
+ ? this.property("__data__", value)
+ : this.node().__data__;
+};
+
+function dispatchEvent(node, type, params) {
+ var window = defaultView(node),
+ event = window.CustomEvent;
+
+ if (typeof event === "function") {
+ event = new event(type, params);
+ } else {
+ event = window.document.createEvent("Event");
+ if (params) event.initEvent(type, params.bubbles, params.cancelable), event.detail = params.detail;
+ else event.initEvent(type, false, false);
+ }
+
+ node.dispatchEvent(event);
+}
+
+function dispatchConstant(type, params) {
+ return function() {
+ return dispatchEvent(this, type, params);
+ };
+}
+
+function dispatchFunction(type, params) {
+ return function() {
+ return dispatchEvent(this, type, params.apply(this, arguments));
+ };
+}
+
+var selection_dispatch = function(type, params) {
+ return this.each((typeof params === "function"
+ ? dispatchFunction
+ : dispatchConstant)(type, params));
+};
+
+var root = [null];
+
+function Selection(groups, parents) {
+ this._groups = groups;
+ this._parents = parents;
+}
+
+function selection() {
+ return new Selection([[document.documentElement]], root);
+}
+
+Selection.prototype = selection.prototype = {
+ constructor: Selection,
+ select: selection_select,
+ selectAll: selection_selectAll,
+ filter: selection_filter,
+ data: selection_data,
+ enter: selection_enter,
+ exit: selection_exit,
+ merge: selection_merge,
+ order: selection_order,
+ sort: selection_sort,
+ call: selection_call,
+ nodes: selection_nodes,
+ node: selection_node,
+ size: selection_size,
+ empty: selection_empty,
+ each: selection_each,
+ attr: selection_attr,
+ style: selection_style,
+ property: selection_property,
+ classed: selection_classed,
+ text: selection_text,
+ html: selection_html,
+ raise: selection_raise,
+ lower: selection_lower,
+ append: selection_append,
+ insert: selection_insert,
+ remove: selection_remove,
+ datum: selection_datum,
+ on: selection_on,
+ dispatch: selection_dispatch
+};
+
+var select = function(selector) {
+ return typeof selector === "string"
+ ? new Selection([[document.querySelector(selector)]], [document.documentElement])
+ : new Selection([[selector]], root);
+};
+
+function count(node) {
+ var sum = 0,
+ children = node.children,
+ i = children && children.length;
+ if (!i) sum = 1;
+ else while (--i >= 0) sum += children[i].value;
+ node.value = sum;
+}
+
+var node_count = function() {
+ return this.eachAfter(count);
+};
+
+var node_each = function(callback) {
+ var node = this, current, next = [node], children, i, n;
+ do {
+ current = next.reverse(), next = [];
+ while (node = current.pop()) {
+ callback(node), children = node.children;
+ if (children) for (i = 0, n = children.length; i < n; ++i) {
+ next.push(children[i]);
+ }
+ }
+ } while (next.length);
+ return this;
+};
+
+var node_eachBefore = function(callback) {
+ var node = this, nodes = [node], children, i;
+ while (node = nodes.pop()) {
+ callback(node), children = node.children;
+ if (children) for (i = children.length - 1; i >= 0; --i) {
+ nodes.push(children[i]);
+ }
+ }
+ return this;
+};
+
+var node_eachAfter = function(callback) {
+ var node = this, nodes = [node], next = [], children, i, n;
+ while (node = nodes.pop()) {
+ next.push(node), children = node.children;
+ if (children) for (i = 0, n = children.length; i < n; ++i) {
+ nodes.push(children[i]);
+ }
+ }
+ while (node = next.pop()) {
+ callback(node);
+ }
+ return this;
+};
+
+var node_sum = function(value) {
+ return this.eachAfter(function(node) {
+ var sum = +value(node.data) || 0,
+ children = node.children,
+ i = children && children.length;
+ while (--i >= 0) sum += children[i].value;
+ node.value = sum;
+ });
+};
+
+var node_sort = function(compare) {
+ return this.eachBefore(function(node) {
+ if (node.children) {
+ node.children.sort(compare);
+ }
+ });
+};
+
+var node_path = function(end) {
+ var start = this,
+ ancestor = leastCommonAncestor(start, end),
+ nodes = [start];
+ while (start !== ancestor) {
+ start = start.parent;
+ nodes.push(start);
+ }
+ var k = nodes.length;
+ while (end !== ancestor) {
+ nodes.splice(k, 0, end);
+ end = end.parent;
+ }
+ return nodes;
+};
+
+function leastCommonAncestor(a, b) {
+ if (a === b) return a;
+ var aNodes = a.ancestors(),
+ bNodes = b.ancestors(),
+ c = null;
+ a = aNodes.pop();
+ b = bNodes.pop();
+ while (a === b) {
+ c = a;
+ a = aNodes.pop();
+ b = bNodes.pop();
+ }
+ return c;
+}
+
+var node_ancestors = function() {
+ var node = this, nodes = [node];
+ while (node = node.parent) {
+ nodes.push(node);
+ }
+ return nodes;
+};
+
+var node_descendants = function() {
+ var nodes = [];
+ this.each(function(node) {
+ nodes.push(node);
+ });
+ return nodes;
+};
+
+var node_leaves = function() {
+ var leaves = [];
+ this.eachBefore(function(node) {
+ if (!node.children) {
+ leaves.push(node);
+ }
+ });
+ return leaves;
+};
+
+var node_links = function() {
+ var root = this, links = [];
+ root.each(function(node) {
+ if (node !== root) { // Don’t include the root’s parent, if any.
+ links.push({source: node.parent, target: node});
+ }
+ });
+ return links;
+};
+
+function hierarchy(data, children) {
+ var root = new Node(data),
+ valued = +data.value && (root.value = data.value),
+ node,
+ nodes = [root],
+ child,
+ childs,
+ i,
+ n;
+
+ if (children == null) children = defaultChildren;
+
+ while (node = nodes.pop()) {
+ if (valued) node.value = +node.data.value;
+ if ((childs = children(node.data)) && (n = childs.length)) {
+ node.children = new Array(n);
+ for (i = n - 1; i >= 0; --i) {
+ nodes.push(child = node.children[i] = new Node(childs[i]));
+ child.parent = node;
+ child.depth = node.depth + 1;
+ }
+ }
+ }
+
+ return root.eachBefore(computeHeight);
+}
+
+function node_copy() {
+ return hierarchy(this).eachBefore(copyData);
+}
+
+function defaultChildren(d) {
+ return d.children;
+}
+
+function copyData(node) {
+ node.data = node.data.data;
+}
+
+function computeHeight(node) {
+ var height = 0;
+ do node.height = height;
+ while ((node = node.parent) && (node.height < ++height));
+}
+
+function Node(data) {
+ this.data = data;
+ this.depth =
+ this.height = 0;
+ this.parent = null;
+}
+
+Node.prototype = hierarchy.prototype = {
+ constructor: Node,
+ count: node_count,
+ each: node_each,
+ eachAfter: node_eachAfter,
+ eachBefore: node_eachBefore,
+ sum: node_sum,
+ sort: node_sort,
+ path: node_path,
+ ancestors: node_ancestors,
+ descendants: node_descendants,
+ leaves: node_leaves,
+ links: node_links,
+ copy: node_copy
+};
+
+var roundNode = function(node) {
+ node.x0 = Math.round(node.x0);
+ node.y0 = Math.round(node.y0);
+ node.x1 = Math.round(node.x1);
+ node.y1 = Math.round(node.y1);
+};
+
+var treemapDice = function(parent, x0, y0, x1, y1) {
+ var nodes = parent.children,
+ node,
+ i = -1,
+ n = nodes.length,
+ k = parent.value && (x1 - x0) / parent.value;
+
+ while (++i < n) {
+ node = nodes[i], node.y0 = y0, node.y1 = y1;
+ node.x0 = x0, node.x1 = x0 += node.value * k;
+ }
+};
+
+var partition = function() {
+ var dx = 1,
+ dy = 1,
+ padding = 0,
+ round = false;
+
+ function partition(root) {
+ var n = root.height + 1;
+ root.x0 =
+ root.y0 = padding;
+ root.x1 = dx;
+ root.y1 = dy / n;
+ root.eachBefore(positionNode(dy, n));
+ if (round) root.eachBefore(roundNode);
+ return root;
+ }
+
+ function positionNode(dy, n) {
+ return function(node) {
+ if (node.children) {
+ treemapDice(node, node.x0, dy * (node.depth + 1) / n, node.x1, dy * (node.depth + 2) / n);
+ }
+ var x0 = node.x0,
+ y0 = node.y0,
+ x1 = node.x1 - padding,
+ y1 = node.y1 - padding;
+ if (x1 < x0) x0 = x1 = (x0 + x1) / 2;
+ if (y1 < y0) y0 = y1 = (y0 + y1) / 2;
+ node.x0 = x0;
+ node.y0 = y0;
+ node.x1 = x1;
+ node.y1 = y1;
+ };
+ }
+
+ partition.round = function(x) {
+ return arguments.length ? (round = !!x, partition) : round;
+ };
+
+ partition.size = function(x) {
+ return arguments.length ? (dx = +x[0], dy = +x[1], partition) : [dx, dy];
+ };
+
+ partition.padding = function(x) {
+ return arguments.length ? (padding = +x, partition) : padding;
+ };
+
+ return partition;
+};
+
+var ascending$1 = function(a, b) {
+ return a < b ? -1 : a > b ? 1 : a >= b ? 0 : NaN;
+};
+
+var bisector = function(compare) {
+ if (compare.length === 1) compare = ascendingComparator(compare);
+ return {
+ left: function(a, x, lo, hi) {
+ if (lo == null) lo = 0;
+ if (hi == null) hi = a.length;
+ while (lo < hi) {
+ var mid = lo + hi >>> 1;
+ if (compare(a[mid], x) < 0) lo = mid + 1;
+ else hi = mid;
+ }
+ return lo;
+ },
+ right: function(a, x, lo, hi) {
+ if (lo == null) lo = 0;
+ if (hi == null) hi = a.length;
+ while (lo < hi) {
+ var mid = lo + hi >>> 1;
+ if (compare(a[mid], x) > 0) hi = mid;
+ else lo = mid + 1;
+ }
+ return lo;
+ }
+ };
+};
+
+function ascendingComparator(f) {
+ return function(d, x) {
+ return ascending$1(f(d), x);
+ };
+}
+
+var ascendingBisect = bisector(ascending$1);
+var bisectRight = ascendingBisect.right;
+
+var e10 = Math.sqrt(50);
+var e5 = Math.sqrt(10);
+var e2 = Math.sqrt(2);
+
+var ticks = function(start, stop, count) {
+ var reverse,
+ i = -1,
+ n,
+ ticks,
+ step;
+
+ stop = +stop, start = +start, count = +count;
+ if (start === stop && count > 0) return [start];
+ if (reverse = stop < start) n = start, start = stop, stop = n;
+ if ((step = tickIncrement(start, stop, count)) === 0 || !isFinite(step)) return [];
+
+ if (step > 0) {
+ start = Math.ceil(start / step);
+ stop = Math.floor(stop / step);
+ ticks = new Array(n = Math.ceil(stop - start + 1));
+ while (++i < n) ticks[i] = (start + i) * step;
+ } else {
+ start = Math.floor(start * step);
+ stop = Math.ceil(stop * step);
+ ticks = new Array(n = Math.ceil(start - stop + 1));
+ while (++i < n) ticks[i] = (start - i) / step;
+ }
+
+ if (reverse) ticks.reverse();
+
+ return ticks;
+};
+
+function tickIncrement(start, stop, count) {
+ var step = (stop - start) / Math.max(0, count),
+ power = Math.floor(Math.log(step) / Math.LN10),
+ error = step / Math.pow(10, power);
+ return power >= 0
+ ? (error >= e10 ? 10 : error >= e5 ? 5 : error >= e2 ? 2 : 1) * Math.pow(10, power)
+ : -Math.pow(10, -power) / (error >= e10 ? 10 : error >= e5 ? 5 : error >= e2 ? 2 : 1);
+}
+
+function tickStep(start, stop, count) {
+ var step0 = Math.abs(stop - start) / Math.max(0, count),
+ step1 = Math.pow(10, Math.floor(Math.log(step0) / Math.LN10)),
+ error = step0 / step1;
+ if (error >= e10) step1 *= 10;
+ else if (error >= e5) step1 *= 5;
+ else if (error >= e2) step1 *= 2;
+ return stop < start ? -step1 : step1;
+}
+
+// Every stored key is prefixed with "$" so user keys can never collide with
+// Object.prototype properties ("__proto__", "hasOwnProperty", ...).
+var prefix = "$";
+
+function Map() {}
+
+// d3-collection's string-keyed map built on a plain object. Iterating
+// methods filter for-in properties whose first character is the prefix;
+// keys are implicitly coerced to strings by the "$" + key concatenation.
+Map.prototype = map$1.prototype = {
+ constructor: Map,
+ has: function(key) {
+ return (prefix + key) in this;
+ },
+ get: function(key) {
+ return this[prefix + key];
+ },
+ set: function(key, value) {
+ this[prefix + key] = value;
+ return this;
+ },
+ remove: function(key) {
+ var property = prefix + key;
+ return property in this && delete this[property];
+ },
+ clear: function() {
+ for (var property in this) if (property[0] === prefix) delete this[property];
+ },
+ keys: function() {
+ var keys = [];
+ for (var property in this) if (property[0] === prefix) keys.push(property.slice(1));
+ return keys;
+ },
+ values: function() {
+ var values = [];
+ for (var property in this) if (property[0] === prefix) values.push(this[property]);
+ return values;
+ },
+ entries: function() {
+ var entries = [];
+ for (var property in this) if (property[0] === prefix) entries.push({key: property.slice(1), value: this[property]});
+ return entries;
+ },
+ size: function() {
+ var size = 0;
+ for (var property in this) if (property[0] === prefix) ++size;
+ return size;
+ },
+ empty: function() {
+ for (var property in this) if (property[0] === prefix) return false;
+ return true;
+ },
+ each: function(f) {
+ for (var property in this) if (property[0] === prefix) f(this[property], property.slice(1), this);
+ }
+};
+
+// map$1(object[, f]): builds a Map from another Map (copied), an array
+// (keyed by numeric index, or by f(element, index, array) when given),
+// or a plain object's enumerable properties.
+function map$1(object, f) {
+ var map = new Map;
+
+ // Copy constructor.
+ if (object instanceof Map) object.each(function(value, key) { map.set(key, value); });
+
+ // Index array by numeric index or specified key function.
+ else if (Array.isArray(object)) {
+ var i = -1,
+ n = object.length,
+ o;
+
+ if (f == null) while (++i < n) map.set(i, object[i]);
+ else while (++i < n) map.set(f(o = object[i], i, object), o);
+ }
+
+ // Convert object to map.
+ else if (object) for (var key in object) map.set(key, object[key]);
+
+ return map;
+}
+
+function Set() {}
+
+var proto = map$1.prototype;
+
+// d3-collection's string set: reuses the Map machinery above, storing each
+// value under "$" + value with the (stringified) value itself as the data,
+// so Map's keys() doubles as the set's values().
+Set.prototype = set.prototype = {
+ constructor: Set,
+ has: proto.has,
+ add: function(value) {
+ value += "";
+ this[prefix + value] = value;
+ return this;
+ },
+ remove: proto.remove,
+ clear: proto.clear,
+ values: proto.keys,
+ size: proto.size,
+ empty: proto.empty,
+ each: proto.each
+};
+
+// set(object[, f]): builds a Set from another Set (copied) or an array-like
+// (elements added directly, or mapped through f first).
+function set(object, f) {
+ var set = new Set;
+
+ // Copy constructor.
+ if (object instanceof Set) object.each(function(value) { set.add(value); });
+
+ // Otherwise, assume it’s an array.
+ else if (object) {
+ var i = -1, n = object.length;
+ if (f == null) while (++i < n) set.add(object[i]);
+ else while (++i < n) set.add(f(object[i], i, object));
+ }
+
+ return set;
+}
+
+// Cached Array.prototype methods, used generically on array-likes below.
+var array$1 = Array.prototype;
+
+var map$3 = array$1.map;
+var slice$2 = array$1.slice;
+
+// d3-color's class helper: installs a shared prototype on both the
+// constructor and its factory function, so `instanceof` works for either.
+var define = function(constructor, factory, prototype) {
+ constructor.prototype = factory.prototype = prototype;
+ prototype.constructor = constructor;
+};
+
+// Returns a new prototype object inheriting from parent's prototype with
+// the given own methods mixed in (prototypal subclassing).
+function extend(parent, definition) {
+ var prototype = Object.create(parent.prototype);
+ for (var key in definition) prototype[key] = definition[key];
+ return prototype;
+}
+
+// Abstract base for all color spaces below (Rgb, Hsl, Lab, Hcl, Cubehelix).
+function Color() {}
+
+// Channel scale factors for darker()/brighter(); brighter is the exact
+// inverse of darker so the two operations round-trip.
+var darker = 0.7;
+var brighter = 1 / darker;
+
+// Regex fragments: reI = signed integer, reN = signed number (optional
+// exponent), reP = signed number followed by "%"; each capturing one group.
+var reI = "\\s*([+-]?\\d+)\\s*";
+var reN = "\\s*([+-]?\\d*\\.?\\d+(?:[eE][+-]?\\d+)?)\\s*";
+var reP = "\\s*([+-]?\\d*\\.?\\d+(?:[eE][+-]?\\d+)?)%\\s*";
+var reHex3 = /^#([0-9a-f]{3})$/;
+var reHex6 = /^#([0-9a-f]{6})$/;
+// Array-to-string coercion joins the fragments with "," — e.g.
+// "^rgb\\(" + I + "," + I + "," + I + "\\)$".
+var reRgbInteger = new RegExp("^rgb\\(" + [reI, reI, reI] + "\\)$");
+var reRgbPercent = new RegExp("^rgb\\(" + [reP, reP, reP] + "\\)$");
+var reRgbaInteger = new RegExp("^rgba\\(" + [reI, reI, reI, reN] + "\\)$");
+var reRgbaPercent = new RegExp("^rgba\\(" + [reP, reP, reP, reN] + "\\)$");
+var reHslPercent = new RegExp("^hsl\\(" + [reN, reP, reP] + "\\)$");
+var reHslaPercent = new RegExp("^hsla\\(" + [reN, reP, reP, reN] + "\\)$");
+
+// CSS named colors (CSS Color Module Level 3/4 keywords), each packed as a
+// 24-bit 0xRRGGBB integer; decoded by rgbn() below.
+var named = {
+ aliceblue: 0xf0f8ff,
+ antiquewhite: 0xfaebd7,
+ aqua: 0x00ffff,
+ aquamarine: 0x7fffd4,
+ azure: 0xf0ffff,
+ beige: 0xf5f5dc,
+ bisque: 0xffe4c4,
+ black: 0x000000,
+ blanchedalmond: 0xffebcd,
+ blue: 0x0000ff,
+ blueviolet: 0x8a2be2,
+ brown: 0xa52a2a,
+ burlywood: 0xdeb887,
+ cadetblue: 0x5f9ea0,
+ chartreuse: 0x7fff00,
+ chocolate: 0xd2691e,
+ coral: 0xff7f50,
+ cornflowerblue: 0x6495ed,
+ cornsilk: 0xfff8dc,
+ crimson: 0xdc143c,
+ cyan: 0x00ffff,
+ darkblue: 0x00008b,
+ darkcyan: 0x008b8b,
+ darkgoldenrod: 0xb8860b,
+ darkgray: 0xa9a9a9,
+ darkgreen: 0x006400,
+ darkgrey: 0xa9a9a9,
+ darkkhaki: 0xbdb76b,
+ darkmagenta: 0x8b008b,
+ darkolivegreen: 0x556b2f,
+ darkorange: 0xff8c00,
+ darkorchid: 0x9932cc,
+ darkred: 0x8b0000,
+ darksalmon: 0xe9967a,
+ darkseagreen: 0x8fbc8f,
+ darkslateblue: 0x483d8b,
+ darkslategray: 0x2f4f4f,
+ darkslategrey: 0x2f4f4f,
+ darkturquoise: 0x00ced1,
+ darkviolet: 0x9400d3,
+ deeppink: 0xff1493,
+ deepskyblue: 0x00bfff,
+ dimgray: 0x696969,
+ dimgrey: 0x696969,
+ dodgerblue: 0x1e90ff,
+ firebrick: 0xb22222,
+ floralwhite: 0xfffaf0,
+ forestgreen: 0x228b22,
+ fuchsia: 0xff00ff,
+ gainsboro: 0xdcdcdc,
+ ghostwhite: 0xf8f8ff,
+ gold: 0xffd700,
+ goldenrod: 0xdaa520,
+ gray: 0x808080,
+ green: 0x008000,
+ greenyellow: 0xadff2f,
+ grey: 0x808080,
+ honeydew: 0xf0fff0,
+ hotpink: 0xff69b4,
+ indianred: 0xcd5c5c,
+ indigo: 0x4b0082,
+ ivory: 0xfffff0,
+ khaki: 0xf0e68c,
+ lavender: 0xe6e6fa,
+ lavenderblush: 0xfff0f5,
+ lawngreen: 0x7cfc00,
+ lemonchiffon: 0xfffacd,
+ lightblue: 0xadd8e6,
+ lightcoral: 0xf08080,
+ lightcyan: 0xe0ffff,
+ lightgoldenrodyellow: 0xfafad2,
+ lightgray: 0xd3d3d3,
+ lightgreen: 0x90ee90,
+ lightgrey: 0xd3d3d3,
+ lightpink: 0xffb6c1,
+ lightsalmon: 0xffa07a,
+ lightseagreen: 0x20b2aa,
+ lightskyblue: 0x87cefa,
+ lightslategray: 0x778899,
+ lightslategrey: 0x778899,
+ lightsteelblue: 0xb0c4de,
+ lightyellow: 0xffffe0,
+ lime: 0x00ff00,
+ limegreen: 0x32cd32,
+ linen: 0xfaf0e6,
+ magenta: 0xff00ff,
+ maroon: 0x800000,
+ mediumaquamarine: 0x66cdaa,
+ mediumblue: 0x0000cd,
+ mediumorchid: 0xba55d3,
+ mediumpurple: 0x9370db,
+ mediumseagreen: 0x3cb371,
+ mediumslateblue: 0x7b68ee,
+ mediumspringgreen: 0x00fa9a,
+ mediumturquoise: 0x48d1cc,
+ mediumvioletred: 0xc71585,
+ midnightblue: 0x191970,
+ mintcream: 0xf5fffa,
+ mistyrose: 0xffe4e1,
+ moccasin: 0xffe4b5,
+ navajowhite: 0xffdead,
+ navy: 0x000080,
+ oldlace: 0xfdf5e6,
+ olive: 0x808000,
+ olivedrab: 0x6b8e23,
+ orange: 0xffa500,
+ orangered: 0xff4500,
+ orchid: 0xda70d6,
+ palegoldenrod: 0xeee8aa,
+ palegreen: 0x98fb98,
+ paleturquoise: 0xafeeee,
+ palevioletred: 0xdb7093,
+ papayawhip: 0xffefd5,
+ peachpuff: 0xffdab9,
+ peru: 0xcd853f,
+ pink: 0xffc0cb,
+ plum: 0xdda0dd,
+ powderblue: 0xb0e0e6,
+ purple: 0x800080,
+ rebeccapurple: 0x663399,
+ red: 0xff0000,
+ rosybrown: 0xbc8f8f,
+ royalblue: 0x4169e1,
+ saddlebrown: 0x8b4513,
+ salmon: 0xfa8072,
+ sandybrown: 0xf4a460,
+ seagreen: 0x2e8b57,
+ seashell: 0xfff5ee,
+ sienna: 0xa0522d,
+ silver: 0xc0c0c0,
+ skyblue: 0x87ceeb,
+ slateblue: 0x6a5acd,
+ slategray: 0x708090,
+ slategrey: 0x708090,
+ snow: 0xfffafa,
+ springgreen: 0x00ff7f,
+ steelblue: 0x4682b4,
+ tan: 0xd2b48c,
+ teal: 0x008080,
+ thistle: 0xd8bfd8,
+ tomato: 0xff6347,
+ turquoise: 0x40e0d0,
+ violet: 0xee82ee,
+ wheat: 0xf5deb3,
+ white: 0xffffff,
+ whitesmoke: 0xf5f5f5,
+ yellow: 0xffff00,
+ yellowgreen: 0x9acd32
+};
+
+// Base behavior shared by every color space: delegate displayability and
+// string formatting to the RGB representation.
+define(Color, color, {
+ displayable: function() {
+ return this.rgb().displayable();
+ },
+ toString: function() {
+ return this.rgb() + "";
+ }
+});
+
+// Parses a CSS color string (hex, rgb[a], hsl[a], named, "transparent")
+// into an Rgb or Hsl instance; returns null on unrecognized input.
+function color(format) {
+ var m;
+ format = (format + "").trim().toLowerCase();
+ // #abc expands each nibble to a byte (a -> aa) via bit shuffling.
+ return (m = reHex3.exec(format)) ? (m = parseInt(m[1], 16), new Rgb((m >> 8 & 0xf) | (m >> 4 & 0x0f0), (m >> 4 & 0xf) | (m & 0xf0), ((m & 0xf) << 4) | (m & 0xf), 1)) // #f00
+ : (m = reHex6.exec(format)) ? rgbn(parseInt(m[1], 16)) // #ff0000
+ : (m = reRgbInteger.exec(format)) ? new Rgb(m[1], m[2], m[3], 1) // rgb(255, 0, 0)
+ : (m = reRgbPercent.exec(format)) ? new Rgb(m[1] * 255 / 100, m[2] * 255 / 100, m[3] * 255 / 100, 1) // rgb(100%, 0%, 0%)
+ : (m = reRgbaInteger.exec(format)) ? rgba(m[1], m[2], m[3], m[4]) // rgba(255, 0, 0, 1)
+ : (m = reRgbaPercent.exec(format)) ? rgba(m[1] * 255 / 100, m[2] * 255 / 100, m[3] * 255 / 100, m[4]) // rgb(100%, 0%, 0%, 1)
+ : (m = reHslPercent.exec(format)) ? hsla(m[1], m[2] / 100, m[3] / 100, 1) // hsl(120, 50%, 50%)
+ : (m = reHslaPercent.exec(format)) ? hsla(m[1], m[2] / 100, m[3] / 100, m[4]) // hsla(120, 50%, 50%, 1)
+ : named.hasOwnProperty(format) ? rgbn(named[format])
+ : format === "transparent" ? new Rgb(NaN, NaN, NaN, 0)
+ : null;
+}
+
+// Unpacks a 24-bit 0xRRGGBB integer into an opaque Rgb.
+function rgbn(n) {
+ return new Rgb(n >> 16 & 0xff, n >> 8 & 0xff, n & 0xff, 1);
+}
+
+// Rgb factory that normalizes fully-transparent colors to NaN channels.
+function rgba(r, g, b, a) {
+ if (a <= 0) r = g = b = NaN;
+ return new Rgb(r, g, b, a);
+}
+
+// Coerces any Color instance or CSS string to a fresh Rgb
+// (empty/invalid input yields an all-NaN Rgb).
+function rgbConvert(o) {
+ if (!(o instanceof Color)) o = color(o);
+ if (!o) return new Rgb;
+ o = o.rgb();
+ return new Rgb(o.r, o.g, o.b, o.opacity);
+}
+
+// Public constructor: rgb(specifier) or rgb(r, g, b[, opacity]).
+function rgb(r, g, b, opacity) {
+ return arguments.length === 1 ? rgbConvert(r) : new Rgb(r, g, b, opacity == null ? 1 : opacity);
+}
+
+// RGB color: channels r/g/b nominally in [0, 255], opacity in [0, 1];
+// unary + coerces string regex captures to numbers.
+function Rgb(r, g, b, opacity) {
+ this.r = +r;
+ this.g = +g;
+ this.b = +b;
+ this.opacity = +opacity;
+}
+
+define(Rgb, rgb, extend(Color, {
+ brighter: function(k) {
+ k = k == null ? brighter : Math.pow(brighter, k);
+ return new Rgb(this.r * k, this.g * k, this.b * k, this.opacity);
+ },
+ darker: function(k) {
+ k = k == null ? darker : Math.pow(darker, k);
+ return new Rgb(this.r * k, this.g * k, this.b * k, this.opacity);
+ },
+ rgb: function() {
+ return this;
+ },
+ displayable: function() {
+ return (0 <= this.r && this.r <= 255)
+ && (0 <= this.g && this.g <= 255)
+ && (0 <= this.b && this.b <= 255)
+ && (0 <= this.opacity && this.opacity <= 1);
+ },
+ // Serializes as "rgb(r, g, b)" or "rgba(r, g, b, a)", rounding and
+ // clamping channels; NaN channels become 0 (via `|| 0`), NaN opacity 1.
+ toString: function() {
+ var a = this.opacity; a = isNaN(a) ? 1 : Math.max(0, Math.min(1, a));
+ return (a === 1 ? "rgb(" : "rgba(")
+ + Math.max(0, Math.min(255, Math.round(this.r) || 0)) + ", "
+ + Math.max(0, Math.min(255, Math.round(this.g) || 0)) + ", "
+ + Math.max(0, Math.min(255, Math.round(this.b) || 0))
+ + (a === 1 ? ")" : ", " + a + ")");
+ }
+}));
+
+// Hsl factory that NaNs out channels that carry no information:
+// fully transparent -> all NaN; black/white -> hue and saturation NaN;
+// zero saturation -> hue NaN.
+function hsla(h, s, l, a) {
+ if (a <= 0) h = s = l = NaN;
+ else if (l <= 0 || l >= 1) h = s = NaN;
+ else if (s <= 0) h = NaN;
+ return new Hsl(h, s, l, a);
+}
+
+// Coerces any Color/CSS string to Hsl using the standard RGB->HSL
+// hexcone conversion; hue stays NaN for achromatic colors.
+function hslConvert(o) {
+ if (o instanceof Hsl) return new Hsl(o.h, o.s, o.l, o.opacity);
+ if (!(o instanceof Color)) o = color(o);
+ if (!o) return new Hsl;
+ if (o instanceof Hsl) return o;
+ o = o.rgb();
+ var r = o.r / 255,
+ g = o.g / 255,
+ b = o.b / 255,
+ min = Math.min(r, g, b),
+ max = Math.max(r, g, b),
+ h = NaN,
+ s = max - min,
+ l = (max + min) / 2;
+ if (s) {
+ if (r === max) h = (g - b) / s + (g < b) * 6;
+ else if (g === max) h = (b - r) / s + 2;
+ else h = (r - g) / s + 4;
+ s /= l < 0.5 ? max + min : 2 - max - min;
+ h *= 60;
+ } else {
+ s = l > 0 && l < 1 ? 0 : h;
+ }
+ return new Hsl(h, s, l, o.opacity);
+}
+
+// Public constructor: hsl(specifier) or hsl(h, s, l[, opacity]).
+function hsl(h, s, l, opacity) {
+ return arguments.length === 1 ? hslConvert(h) : new Hsl(h, s, l, opacity == null ? 1 : opacity);
+}
+
+// HSL color: h in degrees, s and l in [0, 1], opacity in [0, 1].
+function Hsl(h, s, l, opacity) {
+ this.h = +h;
+ this.s = +s;
+ this.l = +l;
+ this.opacity = +opacity;
+}
+
+define(Hsl, hsl, extend(Color, {
+ brighter: function(k) {
+ k = k == null ? brighter : Math.pow(brighter, k);
+ return new Hsl(this.h, this.s, this.l * k, this.opacity);
+ },
+ darker: function(k) {
+ k = k == null ? darker : Math.pow(darker, k);
+ return new Hsl(this.h, this.s, this.l * k, this.opacity);
+ },
+ // HSL -> RGB per CSS Color Module Level 3; NaN hue/saturation are
+ // treated as 0 (achromatic gray).
+ rgb: function() {
+ var h = this.h % 360 + (this.h < 0) * 360,
+ s = isNaN(h) || isNaN(this.s) ? 0 : this.s,
+ l = this.l,
+ m2 = l + (l < 0.5 ? l : 1 - l) * s,
+ m1 = 2 * l - m2;
+ return new Rgb(
+ hsl2rgb(h >= 240 ? h - 240 : h + 120, m1, m2),
+ hsl2rgb(h, m1, m2),
+ hsl2rgb(h < 120 ? h + 240 : h - 120, m1, m2),
+ this.opacity
+ );
+ },
+ displayable: function() {
+ return (0 <= this.s && this.s <= 1 || isNaN(this.s))
+ && (0 <= this.l && this.l <= 1)
+ && (0 <= this.opacity && this.opacity <= 1);
+ }
+}));
+
+/* From FvD 13.37, CSS Color Module Level 3 */
+// Maps one hue sector to a channel value in [0, 255].
+function hsl2rgb(h, m1, m2) {
+ return (h < 60 ? m1 + (m2 - m1) * h / 60
+ : h < 180 ? m2
+ : h < 240 ? m1 + (m2 - m1) * (240 - h) / 60
+ : m1) * 255;
+}
+
+var deg2rad = Math.PI / 180;
+var rad2deg = 180 / Math.PI;
+
+// CIELAB constants: Kn is the lightness step for brighter()/darker();
+// Xn/Yn/Zn is the D65 reference white; t0..t3 are the f(t) spline
+// breakpoints of the XYZ<->Lab transfer function.
+var Kn = 18;
+var Xn = 0.950470;
+var Yn = 1;
+var Zn = 1.088830;
+var t0 = 4 / 29;
+var t1 = 6 / 29;
+var t2 = 3 * t1 * t1;
+var t3 = t1 * t1 * t1;
+
+// Coerces any Color/CSS string to Lab via linear-sRGB -> XYZ (D65) -> Lab.
+// NOTE(review): the locals b/a/l here hold the linearized r/g/b channels —
+// the names are misleading but the matrix coefficients are the standard
+// sRGB->XYZ rows applied in r,g,b order.
+function labConvert(o) {
+ if (o instanceof Lab) return new Lab(o.l, o.a, o.b, o.opacity);
+ if (o instanceof Hcl) {
+ var h = o.h * deg2rad;
+ return new Lab(o.l, Math.cos(h) * o.c, Math.sin(h) * o.c, o.opacity);
+ }
+ if (!(o instanceof Rgb)) o = rgbConvert(o);
+ var b = rgb2xyz(o.r),
+ a = rgb2xyz(o.g),
+ l = rgb2xyz(o.b),
+ x = xyz2lab((0.4124564 * b + 0.3575761 * a + 0.1804375 * l) / Xn),
+ y = xyz2lab((0.2126729 * b + 0.7151522 * a + 0.0721750 * l) / Yn),
+ z = xyz2lab((0.0193339 * b + 0.1191920 * a + 0.9503041 * l) / Zn);
+ return new Lab(116 * y - 16, 500 * (x - y), 200 * (y - z), o.opacity);
+}
+
+// Public constructor: lab(specifier) or lab(l, a, b[, opacity]).
+function lab(l, a, b, opacity) {
+ return arguments.length === 1 ? labConvert(l) : new Lab(l, a, b, opacity == null ? 1 : opacity);
+}
+
+// CIELAB color: l is lightness, a/b the opponent chroma axes.
+function Lab(l, a, b, opacity) {
+ this.l = +l;
+ this.a = +a;
+ this.b = +b;
+ this.opacity = +opacity;
+}
+
+define(Lab, lab, extend(Color, {
+ brighter: function(k) {
+ return new Lab(this.l + Kn * (k == null ? 1 : k), this.a, this.b, this.opacity);
+ },
+ darker: function(k) {
+ return new Lab(this.l - Kn * (k == null ? 1 : k), this.a, this.b, this.opacity);
+ },
+ rgb: function() {
+ var y = (this.l + 16) / 116,
+ x = isNaN(this.a) ? y : y + this.a / 500,
+ z = isNaN(this.b) ? y : y - this.b / 200;
+ y = Yn * lab2xyz(y);
+ x = Xn * lab2xyz(x);
+ z = Zn * lab2xyz(z);
+ return new Rgb(
+ xyz2rgb( 3.2404542 * x - 1.5371385 * y - 0.4985314 * z), // D65 -> sRGB
+ xyz2rgb(-0.9692660 * x + 1.8760108 * y + 0.0415560 * z),
+ xyz2rgb( 0.0556434 * x - 0.2040259 * y + 1.0572252 * z),
+ this.opacity
+ );
+ }
+}));
+
+// The CIE f(t) transfer function (cube root with a linear toe) and inverse.
+function xyz2lab(t) {
+ return t > t3 ? Math.pow(t, 1 / 3) : t / t2 + t0;
+}
+
+function lab2xyz(t) {
+ return t > t1 ? t * t * t : t2 * (t - t0);
+}
+
+// sRGB gamma encode (linear -> 0..255) and decode (0..255 -> linear).
+function xyz2rgb(x) {
+ return 255 * (x <= 0.0031308 ? 12.92 * x : 1.055 * Math.pow(x, 1 / 2.4) - 0.055);
+}
+
+function rgb2xyz(x) {
+ return (x /= 255) <= 0.04045 ? x / 12.92 : Math.pow((x + 0.055) / 1.055, 2.4);
+}
+
+// Coerces any Color/CSS string to HCL (Lab in polar form): hue from
+// atan2(b, a) normalized to [0, 360), chroma as the a/b magnitude.
+function hclConvert(o) {
+ if (o instanceof Hcl) return new Hcl(o.h, o.c, o.l, o.opacity);
+ if (!(o instanceof Lab)) o = labConvert(o);
+ var h = Math.atan2(o.b, o.a) * rad2deg;
+ return new Hcl(h < 0 ? h + 360 : h, Math.sqrt(o.a * o.a + o.b * o.b), o.l, o.opacity);
+}
+
+// Public constructor: hcl(specifier) or hcl(h, c, l[, opacity]).
+function hcl(h, c, l, opacity) {
+ return arguments.length === 1 ? hclConvert(h) : new Hcl(h, c, l, opacity == null ? 1 : opacity);
+}
+
+// HCL color: hue in degrees, chroma, lightness.
+function Hcl(h, c, l, opacity) {
+ this.h = +h;
+ this.c = +c;
+ this.l = +l;
+ this.opacity = +opacity;
+}
+
+define(Hcl, hcl, extend(Color, {
+ brighter: function(k) {
+ return new Hcl(this.h, this.c, this.l + Kn * (k == null ? 1 : k), this.opacity);
+ },
+ darker: function(k) {
+ return new Hcl(this.h, this.c, this.l - Kn * (k == null ? 1 : k), this.opacity);
+ },
+ // RGB conversion goes back through Lab.
+ rgb: function() {
+ return labConvert(this).rgb();
+ }
+}));
+
+// Constants of Green's Cubehelix color scheme (D. A. Green, 2011),
+// plus precomputed products used by the forward/inverse transforms.
+var A = -0.14861;
+var B = +1.78277;
+var C = -0.29227;
+var D = -0.90649;
+var E = +1.97294;
+var ED = E * D;
+var EB = E * B;
+var BC_DA = B * C - D * A;
+
+// Coerces any Color/CSS string to Cubehelix by inverting the helix
+// equations; saturation is NaN at l = 0 or l = 1 (division by zero),
+// and hue is NaN when saturation is zero.
+function cubehelixConvert(o) {
+ if (o instanceof Cubehelix) return new Cubehelix(o.h, o.s, o.l, o.opacity);
+ if (!(o instanceof Rgb)) o = rgbConvert(o);
+ var r = o.r / 255,
+ g = o.g / 255,
+ b = o.b / 255,
+ l = (BC_DA * b + ED * r - EB * g) / (BC_DA + ED - EB),
+ bl = b - l,
+ k = (E * (g - l) - C * bl) / D,
+ s = Math.sqrt(k * k + bl * bl) / (E * l * (1 - l)), // NaN if l=0 or l=1
+ h = s ? Math.atan2(k, bl) * rad2deg - 120 : NaN;
+ return new Cubehelix(h < 0 ? h + 360 : h, s, l, o.opacity);
+}
+
+// Public constructor: cubehelix(specifier) or cubehelix(h, s, l[, opacity]).
+function cubehelix(h, s, l, opacity) {
+ return arguments.length === 1 ? cubehelixConvert(h) : new Cubehelix(h, s, l, opacity == null ? 1 : opacity);
+}
+
+// Cubehelix color: hue in degrees, saturation, lightness.
+function Cubehelix(h, s, l, opacity) {
+ this.h = +h;
+ this.s = +s;
+ this.l = +l;
+ this.opacity = +opacity;
+}
+
+define(Cubehelix, cubehelix, extend(Color, {
+ brighter: function(k) {
+ k = k == null ? brighter : Math.pow(brighter, k);
+ return new Cubehelix(this.h, this.s, this.l * k, this.opacity);
+ },
+ darker: function(k) {
+ k = k == null ? darker : Math.pow(darker, k);
+ return new Cubehelix(this.h, this.s, this.l * k, this.opacity);
+ },
+ // Forward helix transform; the +120deg offset matches the inverse above.
+ rgb: function() {
+ var h = isNaN(this.h) ? 0 : (this.h + 120) * deg2rad,
+ l = +this.l,
+ a = isNaN(this.s) ? 0 : this.s * l * (1 - l),
+ cosh = Math.cos(h),
+ sinh = Math.sin(h);
+ return new Rgb(
+ 255 * (l + a * (A * cosh + B * sinh)),
+ 255 * (l + a * (C * cosh + D * sinh)),
+ 255 * (l + a * (E * cosh)),
+ this.opacity
+ );
+ }
+}));
+
+// Interpolator that ignores t and always returns x (used when the two
+// endpoints are equal or one is NaN).
+var constant$3 = function(x) {
+ return function() {
+ return x;
+ };
+};
+
+// Linear interpolator a + t*d for a precomputed delta d.
+function linear$1(a, d) {
+ return function(t) {
+ return a + t * d;
+ };
+}
+
+// Gamma-corrected interpolator: blends a and b in gamma-power space.
+function exponential(a, b, y) {
+ return a = Math.pow(a, y), b = Math.pow(b, y) - a, y = 1 / y, function(t) {
+ return Math.pow(a + t * b, y);
+ };
+}
+
+// Hue interpolator: takes the shortest path around the 360deg circle;
+// falls back to a constant when the delta is 0 or an endpoint is NaN.
+function hue(a, b) {
+ var d = b - a;
+ return d ? linear$1(a, d > 180 || d < -180 ? d - 360 * Math.round(d / 360) : d) : constant$3(isNaN(a) ? b : a);
+}
+
+// Returns a channel-interpolator factory for the given gamma
+// (gamma 1 short-circuits to plain linear interpolation).
+function gamma(y) {
+ return (y = +y) === 1 ? nogamma : function(a, b) {
+ return b - a ? exponential(a, b, y) : constant$3(isNaN(a) ? b : a);
+ };
+}
+
+// Plain linear channel interpolator with NaN fallback.
+function nogamma(a, b) {
+ var d = b - a;
+ return d ? linear$1(a, d) : constant$3(isNaN(a) ? b : a);
+}
+
+// d3-interpolate's interpolateRgb (gamma 1): blends each RGB channel and
+// opacity, mutating a single reused Rgb instance and serializing it per t.
+// The .gamma(y) method returns a re-parameterized variant.
+var interpolateRgb = (function rgbGamma(y) {
+ var color$$1 = gamma(y);
+
+ function rgb$$1(start, end) {
+ var r = color$$1((start = rgb(start)).r, (end = rgb(end)).r),
+ g = color$$1(start.g, end.g),
+ b = color$$1(start.b, end.b),
+ opacity = nogamma(start.opacity, end.opacity);
+ return function(t) {
+ start.r = r(t);
+ start.g = g(t);
+ start.b = b(t);
+ start.opacity = opacity(t);
+ return start + "";
+ };
+ }
+
+ rgb$$1.gamma = rgbGamma;
+
+ return rgb$$1;
+})(1);
+
+// Interpolates arrays elementwise via interpolateValue; indices present
+// only in b are held constant. The result array c is reused across calls
+// to the returned function.
+var array$2 = function(a, b) {
+ var nb = b ? b.length : 0,
+ na = a ? Math.min(nb, a.length) : 0,
+ x = new Array(nb),
+ c = new Array(nb),
+ i;
+
+ for (i = 0; i < na; ++i) x[i] = interpolateValue(a[i], b[i]);
+ for (; i < nb; ++i) c[i] = b[i];
+
+ return function(t) {
+ for (i = 0; i < na; ++i) c[i] = x[i](t);
+ return c;
+ };
+};
+
+// Interpolates Dates linearly by epoch millis, reusing one Date instance.
+var date = function(a, b) {
+ var d = new Date;
+ return a = +a, b -= a, function(t) {
+ return d.setTime(a + b * t), d;
+ };
+};
+
+// Linear numeric interpolator: t -> a + (b - a) * t.
+var interpolateNumber = function(a, b) {
+ return a = +a, b -= a, function(t) {
+ return a + b * t;
+ };
+};
+
+// Interpolates plain objects key-by-key: keys shared by a and b are
+// interpolated, keys only in b are copied through unchanged.
+var object = function(a, b) {
+ var i = {},
+ c = {},
+ k;
+
+ if (a === null || typeof a !== "object") a = {};
+ if (b === null || typeof b !== "object") b = {};
+
+ for (k in b) {
+ if (k in a) {
+ i[k] = interpolateValue(a[k], b[k]);
+ } else {
+ c[k] = b[k];
+ }
+ }
+
+ return function(t) {
+ for (k in i) c[k] = i[k](t);
+ return c;
+ };
+};
+
+// Number-matching regex; reB is a second instance so a and b can be
+// scanned concurrently with independent lastIndex state.
+var reA = /[-+]?(?:\d+\.?\d*|\.?\d+)(?:[eE][-+]?\d+)?/g;
+var reB = new RegExp(reA.source, "g");
+
+// Constant-string interpolator (no numbers to animate).
+function zero(b) {
+ return function() {
+ return b;
+ };
+}
+
+// Single-number interpolator, re-stringified.
+function one(b) {
+ return function(t) {
+ return b(t) + "";
+ };
+}
+
+// Interpolates between two strings by pairing up embedded numbers
+// positionally (e.g. "10px" -> "20px") and keeping b's other text.
+// s holds literal segments and placeholders; q holds the numeric
+// interpolators targeting placeholder slots.
+var interpolateString = function(a, b) {
+ var bi = reA.lastIndex = reB.lastIndex = 0, // scan index for next number in b
+ am, // current match in a
+ bm, // current match in b
+ bs, // string preceding current number in b, if any
+ i = -1, // index in s
+ s = [], // string constants and placeholders
+ q = []; // number interpolators
+
+ // Coerce inputs to strings.
+ a = a + "", b = b + "";
+
+ // Interpolate pairs of numbers in a & b.
+ while ((am = reA.exec(a))
+ && (bm = reB.exec(b))) {
+ if ((bs = bm.index) > bi) { // a string precedes the next number in b
+ bs = b.slice(bi, bs);
+ if (s[i]) s[i] += bs; // coalesce with previous string
+ else s[++i] = bs;
+ }
+ if ((am = am[0]) === (bm = bm[0])) { // numbers in a & b match
+ if (s[i]) s[i] += bm; // coalesce with previous string
+ else s[++i] = bm;
+ } else { // interpolate non-matching numbers
+ s[++i] = null;
+ q.push({i: i, x: interpolateNumber(am, bm)});
+ }
+ bi = reB.lastIndex;
+ }
+
+ // Add remains of b.
+ if (bi < b.length) {
+ bs = b.slice(bi);
+ if (s[i]) s[i] += bs; // coalesce with previous string
+ else s[++i] = bs;
+ }
+
+ // Special optimization for only a single match.
+ // Otherwise, interpolate each of the numbers and rejoin the string.
+ return s.length < 2 ? (q[0]
+ ? one(q[0].x)
+ : zero(b))
+ : (b = q.length, function(t) {
+ for (var i = 0, o; i < b; ++i) s[(o = q[i]).i] = o.x(t);
+ return s.join("");
+ });
+};
+
+// Dispatches to the right interpolator based on b's type: null/boolean ->
+// constant, number -> numeric, string -> color-string or generic string,
+// Color/Date/Array/plain object -> their interpolators, else numeric.
+// NOTE(review): `b instanceof color` tests against the color factory's
+// prototype (shared with Color via define), so it matches Color instances.
+var interpolateValue = function(a, b) {
+ var t = typeof b, c;
+ return b == null || t === "boolean" ? constant$3(b)
+ : (t === "number" ? interpolateNumber
+ : t === "string" ? ((c = color(b)) ? (b = c, interpolateRgb) : interpolateString)
+ : b instanceof color ? interpolateRgb
+ : b instanceof Date ? date
+ : Array.isArray(b) ? array$2
+ : typeof b.valueOf !== "function" && typeof b.toString !== "function" || isNaN(b) ? object
+ : interpolateNumber)(a, b);
+};
+
+// Numeric interpolator that rounds to the nearest integer (pixel values).
+var interpolateRound = function(a, b) {
+ return a = +a, b -= a, function(t) {
+ return Math.round(a + b * t);
+ };
+};
+
+var degrees = 180 / Math.PI;
+
+// The identity 2D transform in decomposed form.
+var identity$2 = {
+ translateX: 0,
+ translateY: 0,
+ rotate: 0,
+ skewX: 0,
+ scaleX: 1,
+ scaleY: 1
+};
+
+// Decomposes a 2D affine matrix [a b c d e f] into translate / rotate /
+// skewX / scale components (Gram-Schmidt on the two column vectors).
+var decompose = function(a, b, c, d, e, f) {
+ var scaleX, scaleY, skewX;
+ if (scaleX = Math.sqrt(a * a + b * b)) a /= scaleX, b /= scaleX;
+ if (skewX = a * c + b * d) c -= a * skewX, d -= b * skewX;
+ if (scaleY = Math.sqrt(c * c + d * d)) c /= scaleY, d /= scaleY, skewX /= scaleY;
+ if (a * d < b * c) a = -a, b = -b, skewX = -skewX, scaleX = -scaleX;
+ return {
+ translateX: e,
+ translateY: f,
+ rotate: Math.atan2(b, a) * degrees,
+ skewX: Math.atan(skewX) * degrees,
+ scaleX: scaleX,
+ scaleY: scaleY
+ };
+};
+
+// Lazily-created scratch DOM nodes used to normalize transform strings
+// (browser-only; requires a live document).
+var cssNode;
+var cssRoot;
+var cssView;
+var svgNode;
+
+// Parses a CSS transform string by letting the browser compute the
+// resulting "matrix(...)" and decomposing it.
+function parseCss(value) {
+ if (value === "none") return identity$2;
+ if (!cssNode) cssNode = document.createElement("DIV"), cssRoot = document.documentElement, cssView = document.defaultView;
+ cssNode.style.transform = value;
+ value = cssView.getComputedStyle(cssRoot.appendChild(cssNode), null).getPropertyValue("transform");
+ cssRoot.removeChild(cssNode);
+ // Strip the "matrix(" prefix and ")" suffix, then split the six numbers.
+ value = value.slice(7, -1).split(",");
+ return decompose(+value[0], +value[1], +value[2], +value[3], +value[4], +value[5]);
+}
+
+// Parses an SVG transform attribute via the SVG DOM's consolidated matrix.
+function parseSvg(value) {
+ if (value == null) return identity$2;
+ if (!svgNode) svgNode = document.createElementNS("http://www.w3.org/2000/svg", "g");
+ svgNode.setAttribute("transform", value);
+ if (!(value = svgNode.transform.baseVal.consolidate())) return identity$2;
+ value = value.matrix;
+ return decompose(value.a, value.b, value.c, value.d, value.e, value.f);
+}
+
+// Builds a transform interpolator parameterized by the parser (CSS vs SVG
+// syntax) and the unit/separator strings, since CSS uses "px"/"deg" and
+// SVG uses bare numbers. The returned interpolator decomposes both
+// endpoint transforms and animates each component independently.
+function interpolateTransform(parse, pxComma, pxParen, degParen) {
+
+ // Pops the previous literal segment so components join with spaces.
+ function pop(s) {
+ return s.length ? s.pop() + " " : "";
+ }
+
+ // Each helper pushes literal pieces onto s with null placeholders, and
+ // records {slot index, numeric interpolator} pairs in q for animated
+ // components; constant components are emitted as plain literals.
+ function translate(xa, ya, xb, yb, s, q) {
+ if (xa !== xb || ya !== yb) {
+ var i = s.push("translate(", null, pxComma, null, pxParen);
+ q.push({i: i - 4, x: interpolateNumber(xa, xb)}, {i: i - 2, x: interpolateNumber(ya, yb)});
+ } else if (xb || yb) {
+ s.push("translate(" + xb + pxComma + yb + pxParen);
+ }
+ }
+
+ function rotate(a, b, s, q) {
+ if (a !== b) {
+ if (a - b > 180) b += 360; else if (b - a > 180) a += 360; // shortest path
+ q.push({i: s.push(pop(s) + "rotate(", null, degParen) - 2, x: interpolateNumber(a, b)});
+ } else if (b) {
+ s.push(pop(s) + "rotate(" + b + degParen);
+ }
+ }
+
+ function skewX(a, b, s, q) {
+ if (a !== b) {
+ q.push({i: s.push(pop(s) + "skewX(", null, degParen) - 2, x: interpolateNumber(a, b)});
+ } else if (b) {
+ s.push(pop(s) + "skewX(" + b + degParen);
+ }
+ }
+
+ function scale(xa, ya, xb, yb, s, q) {
+ if (xa !== xb || ya !== yb) {
+ var i = s.push(pop(s) + "scale(", null, ",", null, ")");
+ q.push({i: i - 4, x: interpolateNumber(xa, xb)}, {i: i - 2, x: interpolateNumber(ya, yb)});
+ } else if (xb !== 1 || yb !== 1) {
+ s.push(pop(s) + "scale(" + xb + "," + yb + ")");
+ }
+ }
+
+ return function(a, b) {
+ var s = [], // string constants and placeholders
+ q = []; // number interpolators
+ a = parse(a), b = parse(b);
+ translate(a.translateX, a.translateY, b.translateX, b.translateY, s, q);
+ rotate(a.rotate, b.rotate, s, q);
+ skewX(a.skewX, b.skewX, s, q);
+ scale(a.scaleX, a.scaleY, b.scaleX, b.scaleY, s, q);
+ a = b = null; // gc
+ return function(t) {
+ var i = -1, n = q.length, o;
+ while (++i < n) s[(o = q[i]).i] = o.x(t);
+ return s.join("");
+ };
+ };
+}
+
+// Concrete interpolators for CSS "transform" style and SVG "transform"
+// attribute syntaxes.
+var interpolateTransformCss = interpolateTransform(parseCss, "px, ", "px)", "deg)");
+var interpolateTransformSvg = interpolateTransform(parseSvg, ", ", ")", ")");
+
+// NOTE(review): rho belongs to d3's smooth zoom interpolator; it appears
+// unused within this visible chunk.
+var rho = Math.SQRT2;
+
+// Builds a cubehelix color interpolator parameterized by a hue
+// interpolation strategy (shortest-path vs linear "long" hue), with a
+// .gamma(y) method that re-parameterizes the lightness easing.
+function cubehelix$1(hue$$1) {
+ return (function cubehelixGamma(y) {
+ y = +y;
+
+ function cubehelix$$1(start, end) {
+ var h = hue$$1((start = cubehelix(start)).h, (end = cubehelix(end)).h),
+ s = nogamma(start.s, end.s),
+ l = nogamma(start.l, end.l),
+ opacity = nogamma(start.opacity, end.opacity);
+ return function(t) {
+ start.h = h(t);
+ start.s = s(t);
+ start.l = l(Math.pow(t, y));
+ start.opacity = opacity(t);
+ return start + "";
+ };
+ }
+
+ cubehelix$$1.gamma = cubehelixGamma;
+
+ return cubehelix$$1;
+ })(1);
+}
+
+// Short-path variant is constructed but its reference is dropped here;
+// only the "long" (linear hue) variant is kept.
+cubehelix$1(hue);
+var cubehelixLong = cubehelix$1(nogamma);
+
+// Constant function factory (d3-scale's copy).
+var constant$4 = function(x) {
+ return function() {
+ return x;
+ };
+};
+
+// Numeric coercion used when setting a scale's domain.
+var number$1 = function(x) {
+ return +x;
+};
+
+// Default domain/range for continuous scales.
+var unit = [0, 1];
+
+// Maps a domain value x in [a, b] to a parameter t in [0, 1];
+// degenerate domains (b === a) collapse to a constant.
+function deinterpolateLinear(a, b) {
+ return (b -= (a = +a))
+ ? function(x) { return (x - a) / b; }
+ : constant$4(b);
+}
+
+// Wraps a deinterpolator so out-of-domain inputs clamp to [0, 1].
+function deinterpolateClamp(deinterpolate) {
+ return function(a, b) {
+ var d = deinterpolate(a = +a, b = +b);
+ return function(x) { return x <= a ? 0 : x >= b ? 1 : d(x); };
+ };
+}
+
+// Wraps a reinterpolator so out-of-range parameters clamp to [a, b].
+function reinterpolateClamp(reinterpolate) {
+ return function(a, b) {
+ var r = reinterpolate(a = +a, b = +b);
+ return function(t) { return t <= 0 ? a : t >= 1 ? b : r(t); };
+ };
+}
+
+// Two-element domain/range mapping: normalize through the domain, then
+// interpolate into the range; descending domains are handled by swapping.
+function bimap(domain, range, deinterpolate, reinterpolate) {
+ var d0 = domain[0], d1 = domain[1], r0 = range[0], r1 = range[1];
+ if (d1 < d0) d0 = deinterpolate(d1, d0), r0 = reinterpolate(r1, r0);
+ else d0 = deinterpolate(d0, d1), r0 = reinterpolate(r0, r1);
+ return function(x) { return r0(d0(x)); };
+}
+
+// Piecewise mapping for domains/ranges with more than two elements:
+// binary-search (bisectRight, defined elsewhere in this bundle) for the
+// containing segment, then apply that segment's interpolator pair.
+function polymap(domain, range, deinterpolate, reinterpolate) {
+ var j = Math.min(domain.length, range.length) - 1,
+ d = new Array(j),
+ r = new Array(j),
+ i = -1;
+
+ // Reverse descending domains.
+ if (domain[j] < domain[0]) {
+ domain = domain.slice().reverse();
+ range = range.slice().reverse();
+ }
+
+ while (++i < j) {
+ d[i] = deinterpolate(domain[i], domain[i + 1]);
+ r[i] = reinterpolate(range[i], range[i + 1]);
+ }
+
+ return function(x) {
+ var i = bisectRight(domain, x, 1, j) - 1;
+ return r[i](d[i](x));
+ };
+}
+
+// Copies one continuous scale's configuration onto another (used by
+// scale.copy implementations).
+function copy(source, target) {
+ return target
+ .domain(source.domain())
+ .range(source.range())
+ .interpolate(source.interpolate())
+ .clamp(source.clamp());
+}
+
+// deinterpolate(a, b)(x) takes a domain value x in [a,b] and returns the corresponding parameter t in [0,1].
+// reinterpolate(a, b)(t) takes a parameter t in [0,1] and returns the corresponding domain value x in [a,b].
+// Core of d3-scale's continuous scales: the forward and inverse mapping
+// functions are built lazily (output/input) and invalidated by rescale()
+// whenever the domain, range, clamping or interpolator changes.
+function continuous(deinterpolate, reinterpolate) {
+ var domain = unit,
+ range = unit,
+ interpolate$$1 = interpolateValue,
+ clamp = false,
+ piecewise,
+ output,
+ input;
+
+ function rescale() {
+ piecewise = Math.min(domain.length, range.length) > 2 ? polymap : bimap;
+ output = input = null;
+ return scale;
+ }
+
+ function scale(x) {
+ return (output || (output = piecewise(domain, range, clamp ? deinterpolateClamp(deinterpolate) : deinterpolate, interpolate$$1)))(+x);
+ }
+
+ // Inverse mapping always uses linear deinterpolation over the range.
+ scale.invert = function(y) {
+ return (input || (input = piecewise(range, domain, deinterpolateLinear, clamp ? reinterpolateClamp(reinterpolate) : reinterpolate)))(+y);
+ };
+
+ scale.domain = function(_) {
+ return arguments.length ? (domain = map$3.call(_, number$1), rescale()) : domain.slice();
+ };
+
+ scale.range = function(_) {
+ return arguments.length ? (range = slice$2.call(_), rescale()) : range.slice();
+ };
+
+ scale.rangeRound = function(_) {
+ return range = slice$2.call(_), interpolate$$1 = interpolateRound, rescale();
+ };
+
+ scale.clamp = function(_) {
+ return arguments.length ? (clamp = !!_, rescale()) : clamp;
+ };
+
+ scale.interpolate = function(_) {
+ return arguments.length ? (interpolate$$1 = _, rescale()) : interpolate$$1;
+ };
+
+ return rescale();
+}
+
+// Computes the decimal coefficient and exponent of the specified number x with
+// significant digits p, where x is positive and p is in [1, 21] or undefined.
+// For example, formatDecimal(1.23) returns ["123", 0].
+// (The use of `i` before its var declaration relies on hoisting.)
+var formatDecimal = function(x, p) {
+ if ((i = (x = p ? x.toExponential(p - 1) : x.toExponential()).indexOf("e")) < 0) return null; // NaN, ±Infinity
+ var i, coefficient = x.slice(0, i);
+
+ // The string returned by toExponential either has the form \d\.\d+e[-+]\d+
+ // (e.g., 1.2e+3) or the form \de[-+]\d+ (e.g., 1e+3).
+ return [
+ coefficient.length > 1 ? coefficient[0] + coefficient.slice(2) : coefficient,
+ +x.slice(i + 1)
+ ];
+};
+
+// Returns the base-10 exponent of |x|, or NaN for non-finite input.
+var exponent = function(x) {
+ return x = formatDecimal(Math.abs(x)), x ? x[1] : NaN;
+};
+
+// Returns a digit-grouping function (e.g. thousands separators) honoring
+// the locale's cyclic group sizes, truncated to the available width.
+var formatGroup = function(grouping, thousands) {
+ return function(value, width) {
+ var i = value.length,
+ t = [],
+ j = 0,
+ g = grouping[0],
+ length = 0;
+
+ while (i > 0 && g > 0) {
+ if (length + g + 1 > width) g = Math.max(1, width - length);
+ t.push(value.substring(i -= g, i + g));
+ if ((length += g + 1) > width) break;
+ g = grouping[j = (j + 1) % grouping.length];
+ }
+
+ return t.reverse().join(thousands);
+ };
+};
+
+// Returns a function replacing ASCII digits with locale numerals.
+var formatNumerals = function(numerals) {
+ return function(value) {
+ return value.replace(/[0-9]/g, function(i) {
+ return numerals[+i];
+ });
+ };
+};
+
+// Default format: toPrecision(p) with insignificant trailing zeros (and a
+// bare trailing decimal point) removed; exponential forms pass through.
+var formatDefault = function(x, p) {
+ x = x.toPrecision(p);
+
+ out: for (var n = x.length, i = 1, i0 = -1, i1; i < n; ++i) {
+ switch (x[i]) {
+ case ".": i0 = i1 = i; break;
+ case "0": if (i0 === 0) i0 = i; i1 = i; break;
+ case "e": break out;
+ default: if (i0 > 0) i0 = 0; break;
+ }
+ }
+
+ return i0 > 0 ? x.slice(0, i0) + x.slice(i1 + 1) : x;
+};
+
+// Side channel: the SI prefix exponent chosen by the last formatPrefixAuto
+// call, read by the locale formatter to append the prefix character.
+var prefixExponent;
+
+// SI-prefix format: scales the coefficient so the exponent is a multiple
+// of 3 within [-24, 24] (prefixes y..Y), padding or shifting the decimal
+// point as needed.
+var formatPrefixAuto = function(x, p) {
+ var d = formatDecimal(x, p);
+ if (!d) return x + "";
+ var coefficient = d[0],
+ exponent = d[1],
+ i = exponent - (prefixExponent = Math.max(-8, Math.min(8, Math.floor(exponent / 3))) * 3) + 1,
+ n = coefficient.length;
+ return i === n ? coefficient
+ : i > n ? coefficient + new Array(i - n + 1).join("0")
+ : i > 0 ? coefficient.slice(0, i) + "." + coefficient.slice(i)
+ : "0." + new Array(1 - i).join("0") + formatDecimal(x, Math.max(0, p + i - 1))[0]; // less than 1y!
+};
+
+// Fixed-significant-digits decimal notation (type "r"), never exponential.
+var formatRounded = function(x, p) {
+ var d = formatDecimal(x, p);
+ if (!d) return x + "";
+ var coefficient = d[0],
+ exponent = d[1];
+ return exponent < 0 ? "0." + new Array(-exponent).join("0") + coefficient
+ : coefficient.length > exponent + 1 ? coefficient.slice(0, exponent + 1) + "." + coefficient.slice(exponent + 1)
+ : coefficient + new Array(exponent - coefficient.length + 2).join("0");
+};
+
+// Conversion functions for each format type character (d3-format).
+var formatTypes = {
+ "": formatDefault,
+ "%": function(x, p) { return (x * 100).toFixed(p); },
+ "b": function(x) { return Math.round(x).toString(2); },
+ "c": function(x) { return x + ""; },
+ "d": function(x) { return Math.round(x).toString(10); },
+ "e": function(x, p) { return x.toExponential(p); },
+ "f": function(x, p) { return x.toFixed(p); },
+ "g": function(x, p) { return x.toPrecision(p); },
+ "o": function(x) { return Math.round(x).toString(8); },
+ "p": function(x, p) { return formatRounded(x * 100, p); },
+ "r": formatRounded,
+ "s": formatPrefixAuto,
+ "X": function(x) { return Math.round(x).toString(16).toUpperCase(); },
+ "x": function(x) { return Math.round(x).toString(16); }
+};
+
+// [[fill]align][sign][symbol][0][width][,][.precision][type]
+var re = /^(?:(.)?([<>=^]))?([+\-\( ])?([$#])?(0)?(\d+)?(,)?(\.\d+)?([a-z%])?$/i;
+
+function formatSpecifier(specifier) {
+ return new FormatSpecifier(specifier);
+}
+
+formatSpecifier.prototype = FormatSpecifier.prototype; // instanceof
+
+// Decomposes a format specifier string (see `re` above) into named fields.
+// Note: `match` is used before its `var` declaration below — legal because
+// of var hoisting; the declaration is folded into the initializer list.
+function FormatSpecifier(specifier) {
+ if (!(match = re.exec(specifier))) throw new Error("invalid format: " + specifier);
+
+ var match,
+ fill = match[1] || " ",
+ align = match[2] || ">",
+ sign = match[3] || "-",
+ symbol = match[4] || "",
+ zero = !!match[5],
+ width = match[6] && +match[6],
+ comma = !!match[7],
+ precision = match[8] && +match[8].slice(1),
+ type = match[9] || "";
+
+ // The "n" type is an alias for ",g".
+ if (type === "n") comma = true, type = "g";
+
+ // Map invalid types to the default format.
+ else if (!formatTypes[type]) type = "";
+
+ // If zero fill is specified, padding goes after sign and before digits.
+ if (zero || (fill === "0" && align === "=")) zero = true, fill = "0", align = "=";
+
+ this.fill = fill;
+ this.align = align;
+ this.sign = sign;
+ this.symbol = symbol;
+ this.zero = zero;
+ this.width = width;
+ this.comma = comma;
+ this.precision = precision;
+ this.type = type;
+}
+
+// Reserializes the specifier to canonical string form: width is clamped to
+// at least 1 and precision to at least 0; absent width/precision are omitted.
+FormatSpecifier.prototype.toString = function() {
+ return this.fill
+ + this.align
+ + this.sign
+ + this.symbol
+ + (this.zero ? "0" : "")
+ + (this.width == null ? "" : Math.max(1, this.width | 0))
+ + (this.comma ? "," : "")
+ + (this.precision == null ? "" : "." + Math.max(0, this.precision | 0))
+ + this.type;
+};
+
+// Identity passthrough, used when a locale configures no grouping or numerals.
+var identity$3 = function(x) {
+ return x;
+};
+
+// SI prefixes from yocto (1e-24) to yotta (1e24); index 8 is the empty 1e0 prefix.
+var prefixes = ["y","z","a","f","p","n","µ","m","","k","M","G","T","P","E","Z","Y"];
+
+// d3-format: builds a number-formatting locale from a definition object
+// (grouping, thousands, currency, decimal, numerals, percent). Returns
+// {format, formatPrefix}, where format(specifier) yields a reusable
+// formatting function.
+var formatLocale = function(locale) {
+ var group = locale.grouping && locale.thousands ? formatGroup(locale.grouping, locale.thousands) : identity$3,
+ currency = locale.currency,
+ decimal = locale.decimal,
+ numerals = locale.numerals ? formatNumerals(locale.numerals) : identity$3,
+ percent = locale.percent || "%";
+
+ // Compiles a specifier into a function value -> formatted string.
+ function newFormat(specifier) {
+ specifier = formatSpecifier(specifier);
+
+ var fill = specifier.fill,
+ align = specifier.align,
+ sign = specifier.sign,
+ symbol = specifier.symbol,
+ zero = specifier.zero,
+ width = specifier.width,
+ comma = specifier.comma,
+ precision = specifier.precision,
+ type = specifier.type;
+
+ // Compute the prefix and suffix.
+ // For SI-prefix, the suffix is lazily computed.
+ var prefix = symbol === "$" ? currency[0] : symbol === "#" && /[boxX]/.test(type) ? "0" + type.toLowerCase() : "",
+ suffix = symbol === "$" ? currency[1] : /[%p]/.test(type) ? percent : "";
+
+ // What format function should we use?
+ // Is this an integer type?
+ // Can this type generate exponential notation?
+ var formatType = formatTypes[type],
+ maybeSuffix = !type || /[defgprs%]/.test(type);
+
+ // Set the default precision if not specified,
+ // or clamp the specified precision to the supported range.
+ // For significant precision, it must be in [1, 21].
+ // For fixed precision, it must be in [0, 20].
+ precision = precision == null ? (type ? 6 : 12)
+ : /[gprs]/.test(type) ? Math.max(1, Math.min(21, precision))
+ : Math.max(0, Math.min(20, precision));
+
+ function format(value) {
+ var valuePrefix = prefix,
+ valueSuffix = suffix,
+ i, n, c;
+
+ if (type === "c") {
+ valueSuffix = formatType(value) + valueSuffix;
+ value = "";
+ } else {
+ value = +value;
+
+ // Perform the initial formatting.
+ var valueNegative = value < 0;
+ value = formatType(Math.abs(value), precision);
+
+ // If a negative value rounds to zero during formatting, treat as positive.
+ if (valueNegative && +value === 0) valueNegative = false;
+
+ // Compute the prefix and suffix.
+ // NOTE(review): prefixExponent is a module-level variable, presumably set
+ // by formatPrefixAuto (the "s" type handler) during the formatType call
+ // above — confirm against the rest of the bundle.
+ valuePrefix = (valueNegative ? (sign === "(" ? sign : "-") : sign === "-" || sign === "(" ? "" : sign) + valuePrefix;
+ valueSuffix = valueSuffix + (type === "s" ? prefixes[8 + prefixExponent / 3] : "") + (valueNegative && sign === "(" ? ")" : "");
+
+ // Break the formatted value into the integer “value” part that can be
+ // grouped, and fractional or exponential “suffix” part that is not.
+ if (maybeSuffix) {
+ i = -1, n = value.length;
+ while (++i < n) {
+ if (c = value.charCodeAt(i), 48 > c || c > 57) {
+ valueSuffix = (c === 46 ? decimal + value.slice(i + 1) : value.slice(i)) + valueSuffix;
+ value = value.slice(0, i);
+ break;
+ }
+ }
+ }
+ }
+
+ // If the fill character is not "0", grouping is applied before padding.
+ if (comma && !zero) value = group(value, Infinity);
+
+ // Compute the padding.
+ var length = valuePrefix.length + value.length + valueSuffix.length,
+ padding = length < width ? new Array(width - length + 1).join(fill) : "";
+
+ // If the fill character is "0", grouping is applied after padding.
+ if (comma && zero) value = group(padding + value, padding.length ? width - valueSuffix.length : Infinity), padding = "";
+
+ // Reconstruct the final output based on the desired alignment.
+ switch (align) {
+ case "<": value = valuePrefix + value + valueSuffix + padding; break;
+ case "=": value = valuePrefix + padding + value + valueSuffix; break;
+ case "^": value = padding.slice(0, length = padding.length >> 1) + valuePrefix + value + valueSuffix + padding.slice(length); break;
+ default: value = padding + valuePrefix + value + valueSuffix; break;
+ }
+
+ return numerals(value);
+ }
+
+ format.toString = function() {
+ return specifier + "";
+ };
+
+ return format;
+ }
+
+ // Fixed-scale SI formatter: the SI prefix is chosen once from `value`
+ // and then applied to every formatted number.
+ function formatPrefix(specifier, value) {
+ var f = newFormat((specifier = formatSpecifier(specifier), specifier.type = "f", specifier)),
+ e = Math.max(-8, Math.min(8, Math.floor(exponent(value) / 3))) * 3,
+ k = Math.pow(10, -e),
+ prefix = prefixes[8 + e / 3];
+ return function(value) {
+ return f(k * value) + prefix;
+ };
+ }
+
+ return {
+ format: newFormat,
+ formatPrefix: formatPrefix
+ };
+};
+
+// Active number-format locale and the two functions it exposes.
+var locale;
+
+var formatPrefix;
+
+// Install the default (U.S. English) number locale at module load.
+defaultLocale({
+ decimal: ".",
+ thousands: ",",
+ grouping: [3],
+ currency: ["$", ""]
+});
+
+// Replaces the active locale; format is published on `exports` while
+// formatPrefix stays module-local.
+function defaultLocale(definition) {
+ locale = formatLocale(definition);
+ exports.format = locale.format;
+ formatPrefix = locale.formatPrefix;
+ return locale;
+}
+
+// Suggested decimal precision for formatting a value with the given step.
+var precisionFixed = function(step) {
+ return Math.max(0, -exponent(Math.abs(step)));
+};
+
+// Suggested SI-prefix precision for a step, relative to a reference value.
+var precisionPrefix = function(step, value) {
+ return Math.max(0, Math.max(-8, Math.min(8, Math.floor(exponent(value) / 3))) * 3 - exponent(Math.abs(step)));
+};
+
+// Suggested significant-digit precision for a step, given the maximum value.
+var precisionRound = function(step, max) {
+ step = Math.abs(step), max = Math.abs(max) - step;
+ return Math.max(0, exponent(max) - exponent(step)) + 1;
+};
+
+// d3-scale: builds a tick formatter for the given domain and tick count.
+// When the specifier omits precision, a precision appropriate to the tick
+// step is computed via the precision* helpers above.
+var tickFormat = function(domain, count, specifier) {
+ var start = domain[0],
+ stop = domain[domain.length - 1],
+ step = tickStep(start, stop, count == null ? 10 : count),
+ precision;
+ specifier = formatSpecifier(specifier == null ? ",f" : specifier);
+ switch (specifier.type) {
+ case "s": {
+ var value = Math.max(Math.abs(start), Math.abs(stop));
+ if (specifier.precision == null && !isNaN(precision = precisionPrefix(step, value))) specifier.precision = precision;
+ return formatPrefix(specifier, value);
+ }
+ case "":
+ case "e":
+ case "g":
+ case "p":
+ case "r": {
+ // "e" counts digits after the point, hence the -1 adjustment.
+ if (specifier.precision == null && !isNaN(precision = precisionRound(step, Math.max(Math.abs(start), Math.abs(stop))))) specifier.precision = precision - (specifier.type === "e");
+ break;
+ }
+ case "f":
+ case "%": {
+ // "%" multiplies by 100, hence the -2 adjustment.
+ if (specifier.precision == null && !isNaN(precision = precisionFixed(step))) specifier.precision = precision - (specifier.type === "%") * 2;
+ break;
+ }
+ }
+ return exports.format(specifier);
+};
+
+// d3-scale: augments a continuous scale with ticks, tickFormat and nice.
+function linearish(scale) {
+ var domain = scale.domain;
+
+ scale.ticks = function(count) {
+ var d = domain();
+ return ticks(d[0], d[d.length - 1], count == null ? 10 : count);
+ };
+
+ scale.tickFormat = function(count, specifier) {
+ return tickFormat(domain(), count, specifier);
+ };
+
+ // Extends the domain endpoints to round values based on the tick increment.
+ scale.nice = function(count) {
+ if (count == null) count = 10;
+
+ var d = domain(),
+ i0 = 0,
+ i1 = d.length - 1,
+ start = d[i0],
+ stop = d[i1],
+ step;
+
+ // Operate on an ascending [start, stop]; indexes remember the original order.
+ if (stop < start) {
+ step = start, start = stop, stop = step;
+ step = i0, i0 = i1, i1 = step;
+ }
+
+ step = tickIncrement(start, stop, count);
+
+ // A positive step is an increment; a negative step encodes 1/-step.
+ if (step > 0) {
+ start = Math.floor(start / step) * step;
+ stop = Math.ceil(stop / step) * step;
+ step = tickIncrement(start, stop, count);
+ } else if (step < 0) {
+ start = Math.ceil(start * step) / step;
+ stop = Math.floor(stop * step) / step;
+ step = tickIncrement(start, stop, count);
+ }
+
+ // Recompute once with the widened endpoints, then commit.
+ if (step > 0) {
+ d[i0] = Math.floor(start / step) * step;
+ d[i1] = Math.ceil(stop / step) * step;
+ domain(d);
+ } else if (step < 0) {
+ d[i0] = Math.ceil(start * step) / step;
+ d[i1] = Math.floor(stop * step) / step;
+ domain(d);
+ }
+
+ return scale;
+ };
+
+ return scale;
+}
+
+// Linear scale: a continuous scale with number interpolation plus the
+// linearish tick/nice behavior.
+function linear() {
+ var scale = continuous(deinterpolateLinear, interpolateNumber);
+
+ scale.copy = function() {
+ return copy(scale, linear());
+ };
+
+ return linearish(scale);
+}
+
+// Scratch dates reused by interval.count to avoid per-call allocation.
+var t0$1 = new Date;
+var t1$1 = new Date;
+
+// d3-time: constructs a time interval from four primitives:
+//  floori(date)       — floors date in place to the interval boundary;
+//  offseti(date, n)   — advances date in place by n intervals;
+//  count(start, end)  — optional; counts boundaries in (start, end];
+//  field(date)        — optional; used by every() to test membership cheaply.
+function newInterval(floori, offseti, count, field) {
+
+ function interval(date) {
+ return floori(date = new Date(+date)), date;
+ }
+
+ interval.floor = interval;
+
+ interval.ceil = function(date) {
+ return floori(date = new Date(date - 1)), offseti(date, 1), floori(date), date;
+ };
+
+ interval.round = function(date) {
+ var d0 = interval(date),
+ d1 = interval.ceil(date);
+ return date - d0 < d1 - date ? d0 : d1;
+ };
+
+ interval.offset = function(date, step) {
+ return offseti(date = new Date(+date), step == null ? 1 : Math.floor(step)), date;
+ };
+
+ interval.range = function(start, stop, step) {
+ var range = [];
+ start = interval.ceil(start);
+ step = step == null ? 1 : Math.floor(step);
+ if (!(start < stop) || !(step > 0)) return range; // also handles Invalid Date
+ do range.push(new Date(+start)); while (offseti(start, step), floori(start), start < stop)
+ return range;
+ };
+
+ // Derives a coarser interval containing only dates passing `test`.
+ interval.filter = function(test) {
+ return newInterval(function(date) {
+ if (date >= date) while (floori(date), !test(date)) date.setTime(date - 1);
+ }, function(date, step) {
+ if (date >= date) {
+ if (step < 0) while (++step <= 0) {
+ while (offseti(date, -1), !test(date)) {} // eslint-disable-line no-empty
+ } else while (--step >= 0) {
+ while (offseti(date, +1), !test(date)) {} // eslint-disable-line no-empty
+ }
+ }
+ });
+ };
+
+ if (count) {
+ interval.count = function(start, end) {
+ t0$1.setTime(+start), t1$1.setTime(+end);
+ floori(t0$1), floori(t1$1);
+ return Math.floor(count(t0$1, t1$1));
+ };
+
+ // every(step): interval whose boundaries are every step-th boundary;
+ // uses field() when available to avoid the expensive count() path.
+ interval.every = function(step) {
+ step = Math.floor(step);
+ return !isFinite(step) || !(step > 0) ? null
+ : !(step > 1) ? interval
+ : interval.filter(field
+ ? function(d) { return field(d) % step === 0; }
+ : function(d) { return interval.count(0, d) % step === 0; });
+ };
+ }
+
+ return interval;
+}
+
+// Millisecond interval: flooring is a no-op at this resolution.
+var millisecond = newInterval(function() {
+ // noop
+}, function(date, step) {
+ date.setTime(+date + step);
+}, function(start, end) {
+ return end - start;
+});
+
+// An optimized implementation for this simple case.
+millisecond.every = function(k) {
+ k = Math.floor(k);
+ if (!isFinite(k) || !(k > 0)) return null;
+ if (!(k > 1)) return millisecond;
+ return newInterval(function(date) {
+ date.setTime(Math.floor(date / k) * k);
+ }, function(date, step) {
+ date.setTime(+date + step * k);
+ }, function(start, end) {
+ return (end - start) / k;
+ });
+};
+
+// Durations in milliseconds.
+var durationSecond$1 = 1e3;
+var durationMinute$1 = 6e4;
+var durationHour$1 = 36e5;
+var durationDay$1 = 864e5;
+var durationWeek$1 = 6048e5;
+
+// Local-time intervals (d3-time). Each passes floori/offseti/count/field to
+// newInterval; the field accessor feeds interval.every().
+var second = newInterval(function(date) {
+ date.setTime(Math.floor(date / durationSecond$1) * durationSecond$1);
+}, function(date, step) {
+ date.setTime(+date + step * durationSecond$1);
+}, function(start, end) {
+ return (end - start) / durationSecond$1;
+}, function(date) {
+ return date.getUTCSeconds();
+});
+
+var minute = newInterval(function(date) {
+ date.setTime(Math.floor(date / durationMinute$1) * durationMinute$1);
+}, function(date, step) {
+ date.setTime(+date + step * durationMinute$1);
+}, function(start, end) {
+ return (end - start) / durationMinute$1;
+}, function(date) {
+ return date.getMinutes();
+});
+
+// Hours need the timezone offset folded in so flooring lands on local hour
+// boundaries even for zones not aligned to whole hours.
+var hour = newInterval(function(date) {
+ var offset = date.getTimezoneOffset() * durationMinute$1 % durationHour$1;
+ if (offset < 0) offset += durationHour$1;
+ date.setTime(Math.floor((+date - offset) / durationHour$1) * durationHour$1 + offset);
+}, function(date, step) {
+ date.setTime(+date + step * durationHour$1);
+}, function(start, end) {
+ return (end - start) / durationHour$1;
+}, function(date) {
+ return date.getHours();
+});
+
+// Day count compensates for DST transitions via the offset difference.
+var day = newInterval(function(date) {
+ date.setHours(0, 0, 0, 0);
+}, function(date, step) {
+ date.setDate(date.getDate() + step);
+}, function(start, end) {
+ return (end - start - (end.getTimezoneOffset() - start.getTimezoneOffset()) * durationMinute$1) / durationDay$1;
+}, function(date) {
+ return date.getDate() - 1;
+});
+
+// Weekly interval starting on weekday i (0 = Sunday).
+function weekday(i) {
+ return newInterval(function(date) {
+ date.setDate(date.getDate() - (date.getDay() + 7 - i) % 7);
+ date.setHours(0, 0, 0, 0);
+ }, function(date, step) {
+ date.setDate(date.getDate() + step * 7);
+ }, function(start, end) {
+ return (end - start - (end.getTimezoneOffset() - start.getTimezoneOffset()) * durationMinute$1) / durationWeek$1;
+ });
+}
+
+var sunday = weekday(0);
+var monday = weekday(1);
+var tuesday = weekday(2);
+var wednesday = weekday(3);
+var thursday = weekday(4);
+var friday = weekday(5);
+var saturday = weekday(6);
+
+var month = newInterval(function(date) {
+ date.setDate(1);
+ date.setHours(0, 0, 0, 0);
+}, function(date, step) {
+ date.setMonth(date.getMonth() + step);
+}, function(start, end) {
+ return end.getMonth() - start.getMonth() + (end.getFullYear() - start.getFullYear()) * 12;
+}, function(date) {
+ return date.getMonth();
+});
+
+var year = newInterval(function(date) {
+ date.setMonth(0, 1);
+ date.setHours(0, 0, 0, 0);
+}, function(date, step) {
+ date.setFullYear(date.getFullYear() + step);
+}, function(start, end) {
+ return end.getFullYear() - start.getFullYear();
+}, function(date) {
+ return date.getFullYear();
+});
+
+// An optimized implementation for this simple case.
+year.every = function(k) {
+ return !isFinite(k = Math.floor(k)) || !(k > 0) ? null : newInterval(function(date) {
+ date.setFullYear(Math.floor(date.getFullYear() / k) * k);
+ date.setMonth(0, 1);
+ date.setHours(0, 0, 0, 0);
+ }, function(date, step) {
+ date.setFullYear(date.getFullYear() + step * k);
+ });
+};
+
+// UTC counterparts of the local intervals above; no timezone/DST corrections
+// are needed, so day/week counts divide elapsed milliseconds directly.
+var utcMinute = newInterval(function(date) {
+ date.setUTCSeconds(0, 0);
+}, function(date, step) {
+ date.setTime(+date + step * durationMinute$1);
+}, function(start, end) {
+ return (end - start) / durationMinute$1;
+}, function(date) {
+ return date.getUTCMinutes();
+});
+
+var utcHour = newInterval(function(date) {
+ date.setUTCMinutes(0, 0, 0);
+}, function(date, step) {
+ date.setTime(+date + step * durationHour$1);
+}, function(start, end) {
+ return (end - start) / durationHour$1;
+}, function(date) {
+ return date.getUTCHours();
+});
+
+var utcDay = newInterval(function(date) {
+ date.setUTCHours(0, 0, 0, 0);
+}, function(date, step) {
+ date.setUTCDate(date.getUTCDate() + step);
+}, function(start, end) {
+ return (end - start) / durationDay$1;
+}, function(date) {
+ return date.getUTCDate() - 1;
+});
+
+// UTC weekly interval starting on weekday i (0 = Sunday).
+function utcWeekday(i) {
+ return newInterval(function(date) {
+ date.setUTCDate(date.getUTCDate() - (date.getUTCDay() + 7 - i) % 7);
+ date.setUTCHours(0, 0, 0, 0);
+ }, function(date, step) {
+ date.setUTCDate(date.getUTCDate() + step * 7);
+ }, function(start, end) {
+ return (end - start) / durationWeek$1;
+ });
+}
+
+var utcSunday = utcWeekday(0);
+var utcMonday = utcWeekday(1);
+var utcTuesday = utcWeekday(2);
+var utcWednesday = utcWeekday(3);
+var utcThursday = utcWeekday(4);
+var utcFriday = utcWeekday(5);
+var utcSaturday = utcWeekday(6);
+
+var utcMonth = newInterval(function(date) {
+ date.setUTCDate(1);
+ date.setUTCHours(0, 0, 0, 0);
+}, function(date, step) {
+ date.setUTCMonth(date.getUTCMonth() + step);
+}, function(start, end) {
+ return end.getUTCMonth() - start.getUTCMonth() + (end.getUTCFullYear() - start.getUTCFullYear()) * 12;
+}, function(date) {
+ return date.getUTCMonth();
+});
+
+var utcYear = newInterval(function(date) {
+ date.setUTCMonth(0, 1);
+ date.setUTCHours(0, 0, 0, 0);
+}, function(date, step) {
+ date.setUTCFullYear(date.getUTCFullYear() + step);
+}, function(start, end) {
+ return end.getUTCFullYear() - start.getUTCFullYear();
+}, function(date) {
+ return date.getUTCFullYear();
+});
+
+// An optimized implementation for this simple case.
+utcYear.every = function(k) {
+ return !isFinite(k = Math.floor(k)) || !(k > 0) ? null : newInterval(function(date) {
+ date.setUTCFullYear(Math.floor(date.getUTCFullYear() / k) * k);
+ date.setUTCMonth(0, 1);
+ date.setUTCHours(0, 0, 0, 0);
+ }, function(date, step) {
+ date.setUTCFullYear(date.getUTCFullYear() + step * k);
+ });
+};
+
+// Builds a local Date from parsed fields {y,m,d,H,M,S,L}. Years 0-99 need
+// the setFullYear dance because the Date constructor maps them to 1900-1999.
+function localDate(d) {
+ if (0 <= d.y && d.y < 100) {
+ var date = new Date(-1, d.m, d.d, d.H, d.M, d.S, d.L);
+ date.setFullYear(d.y);
+ return date;
+ }
+ return new Date(d.y, d.m, d.d, d.H, d.M, d.S, d.L);
+}
+
+// UTC counterpart of localDate, with the same two-digit-year workaround.
+function utcDate(d) {
+ if (0 <= d.y && d.y < 100) {
+ var date = new Date(Date.UTC(-1, d.m, d.d, d.H, d.M, d.S, d.L));
+ date.setUTCFullYear(d.y);
+ return date;
+ }
+ return new Date(Date.UTC(d.y, d.m, d.d, d.H, d.M, d.S, d.L));
+}
+
+// Fresh parse-field record initialized to midnight, Jan 1 of year y.
+function newYear(y) {
+ return {y: y, m: 0, d: 1, H: 0, M: 0, S: 0, L: 0};
+}
+
+// d3-time-format: builds a date/time formatting-and-parsing locale from a
+// definition (dateTime, date, time, periods, days, shortDays, months,
+// shortMonths). Returns {format, parse, utcFormat, utcParse}, each taking a
+// strptime-style specifier string.
+function formatLocale$1(locale) {
+ var locale_dateTime = locale.dateTime,
+ locale_date = locale.date,
+ locale_time = locale.time,
+ locale_periods = locale.periods,
+ locale_weekdays = locale.days,
+ locale_shortWeekdays = locale.shortDays,
+ locale_months = locale.months,
+ locale_shortMonths = locale.shortMonths;
+
+ // Case-insensitive alternation regexes and name -> index lookup maps for
+ // each localized name list.
+ var periodRe = formatRe(locale_periods),
+ periodLookup = formatLookup(locale_periods),
+ weekdayRe = formatRe(locale_weekdays),
+ weekdayLookup = formatLookup(locale_weekdays),
+ shortWeekdayRe = formatRe(locale_shortWeekdays),
+ shortWeekdayLookup = formatLookup(locale_shortWeekdays),
+ monthRe = formatRe(locale_months),
+ monthLookup = formatLookup(locale_months),
+ shortMonthRe = formatRe(locale_shortMonths),
+ shortMonthLookup = formatLookup(locale_shortMonths);
+
+ // Directive -> formatter, local time.
+ var formats = {
+ "a": formatShortWeekday,
+ "A": formatWeekday,
+ "b": formatShortMonth,
+ "B": formatMonth,
+ "c": null,
+ "d": formatDayOfMonth,
+ "e": formatDayOfMonth,
+ "f": formatMicroseconds,
+ "H": formatHour24,
+ "I": formatHour12,
+ "j": formatDayOfYear,
+ "L": formatMilliseconds,
+ "m": formatMonthNumber,
+ "M": formatMinutes,
+ "p": formatPeriod,
+ "Q": formatUnixTimestamp,
+ "s": formatUnixTimestampSeconds,
+ "S": formatSeconds,
+ "u": formatWeekdayNumberMonday,
+ "U": formatWeekNumberSunday,
+ "V": formatWeekNumberISO,
+ "w": formatWeekdayNumberSunday,
+ "W": formatWeekNumberMonday,
+ "x": null,
+ "X": null,
+ "y": formatYear,
+ "Y": formatFullYear,
+ "Z": formatZone,
+ "%": formatLiteralPercent
+ };
+
+ // Directive -> formatter, UTC.
+ var utcFormats = {
+ "a": formatUTCShortWeekday,
+ "A": formatUTCWeekday,
+ "b": formatUTCShortMonth,
+ "B": formatUTCMonth,
+ "c": null,
+ "d": formatUTCDayOfMonth,
+ "e": formatUTCDayOfMonth,
+ "f": formatUTCMicroseconds,
+ "H": formatUTCHour24,
+ "I": formatUTCHour12,
+ "j": formatUTCDayOfYear,
+ "L": formatUTCMilliseconds,
+ "m": formatUTCMonthNumber,
+ "M": formatUTCMinutes,
+ "p": formatUTCPeriod,
+ "Q": formatUnixTimestamp,
+ "s": formatUnixTimestampSeconds,
+ "S": formatUTCSeconds,
+ "u": formatUTCWeekdayNumberMonday,
+ "U": formatUTCWeekNumberSunday,
+ "V": formatUTCWeekNumberISO,
+ "w": formatUTCWeekdayNumberSunday,
+ "W": formatUTCWeekNumberMonday,
+ "x": null,
+ "X": null,
+ "y": formatUTCYear,
+ "Y": formatUTCFullYear,
+ "Z": formatUTCZone,
+ "%": formatLiteralPercent
+ };
+
+ // Directive -> parser; parsers write into the field record d and return
+ // the new string index, or -1 on mismatch.
+ var parses = {
+ "a": parseShortWeekday,
+ "A": parseWeekday,
+ "b": parseShortMonth,
+ "B": parseMonth,
+ "c": parseLocaleDateTime,
+ "d": parseDayOfMonth,
+ "e": parseDayOfMonth,
+ "f": parseMicroseconds,
+ "H": parseHour24,
+ "I": parseHour24,
+ "j": parseDayOfYear,
+ "L": parseMilliseconds,
+ "m": parseMonthNumber,
+ "M": parseMinutes,
+ "p": parsePeriod,
+ "Q": parseUnixTimestamp,
+ "s": parseUnixTimestampSeconds,
+ "S": parseSeconds,
+ "u": parseWeekdayNumberMonday,
+ "U": parseWeekNumberSunday,
+ "V": parseWeekNumberISO,
+ "w": parseWeekdayNumberSunday,
+ "W": parseWeekNumberMonday,
+ "x": parseLocaleDate,
+ "X": parseLocaleTime,
+ "y": parseYear,
+ "Y": parseFullYear,
+ "Z": parseZone,
+ "%": parseLiteralPercent
+ };
+
+ // These recursive directive definitions must be deferred.
+ formats.x = newFormat(locale_date, formats);
+ formats.X = newFormat(locale_time, formats);
+ formats.c = newFormat(locale_dateTime, formats);
+ utcFormats.x = newFormat(locale_date, utcFormats);
+ utcFormats.X = newFormat(locale_time, utcFormats);
+ utcFormats.c = newFormat(locale_dateTime, utcFormats);
+
+ // Compiles a format specifier against a directive table; "%" (charCode 37)
+ // introduces a directive, optionally preceded by a pad modifier from `pads`.
+ function newFormat(specifier, formats) {
+ return function(date) {
+ var string = [],
+ i = -1,
+ j = 0,
+ n = specifier.length,
+ c,
+ pad,
+ format;
+
+ if (!(date instanceof Date)) date = new Date(+date);
+
+ while (++i < n) {
+ if (specifier.charCodeAt(i) === 37) {
+ string.push(specifier.slice(j, i));
+ if ((pad = pads[c = specifier.charAt(++i)]) != null) c = specifier.charAt(++i);
+ else pad = c === "e" ? " " : "0";
+ if (format = formats[c]) c = format(date, pad);
+ string.push(c);
+ j = i + 1;
+ }
+ }
+
+ string.push(specifier.slice(j, i));
+ return string.join("");
+ };
+ }
+
+ // Compiles a parse specifier; newDate is localDate or utcDate. Parsing
+ // fills a field record starting from newYear(1900), then resolves
+ // week-based and timezone fields before constructing the Date.
+ function newParse(specifier, newDate) {
+ return function(string) {
+ var d = newYear(1900),
+ i = parseSpecifier(d, specifier, string += "", 0),
+ week, day$$1;
+ if (i != string.length) return null;
+
+ // If a UNIX timestamp is specified, return it.
+ if ("Q" in d) return new Date(d.Q);
+
+ // The am-pm flag is 0 for AM, and 1 for PM.
+ if ("p" in d) d.H = d.H % 12 + d.p * 12;
+
+ // Convert day-of-week and week-of-year to day-of-year.
+ if ("V" in d) {
+ if (d.V < 1 || d.V > 53) return null;
+ if (!("w" in d)) d.w = 1;
+ if ("Z" in d) {
+ week = utcDate(newYear(d.y)), day$$1 = week.getUTCDay();
+ week = day$$1 > 4 || day$$1 === 0 ? utcMonday.ceil(week) : utcMonday(week);
+ week = utcDay.offset(week, (d.V - 1) * 7);
+ d.y = week.getUTCFullYear();
+ d.m = week.getUTCMonth();
+ d.d = week.getUTCDate() + (d.w + 6) % 7;
+ } else {
+ week = newDate(newYear(d.y)), day$$1 = week.getDay();
+ week = day$$1 > 4 || day$$1 === 0 ? monday.ceil(week) : monday(week);
+ week = day.offset(week, (d.V - 1) * 7);
+ d.y = week.getFullYear();
+ d.m = week.getMonth();
+ d.d = week.getDate() + (d.w + 6) % 7;
+ }
+ } else if ("W" in d || "U" in d) {
+ if (!("w" in d)) d.w = "u" in d ? d.u % 7 : "W" in d ? 1 : 0;
+ day$$1 = "Z" in d ? utcDate(newYear(d.y)).getUTCDay() : newDate(newYear(d.y)).getDay();
+ d.m = 0;
+ d.d = "W" in d ? (d.w + 6) % 7 + d.W * 7 - (day$$1 + 5) % 7 : d.w + d.U * 7 - (day$$1 + 6) % 7;
+ }
+
+ // If a time zone is specified, all fields are interpreted as UTC and then
+ // offset according to the specified time zone.
+ if ("Z" in d) {
+ d.H += d.Z / 100 | 0;
+ d.M += d.Z % 100;
+ return utcDate(d);
+ }
+
+ // Otherwise, all fields are in local time.
+ return newDate(d);
+ };
+ }
+
+ // Matches `specifier` against `string` from index j, dispatching "%"
+ // directives to the parsers table; returns the index after the match or -1.
+ function parseSpecifier(d, specifier, string, j) {
+ var i = 0,
+ n = specifier.length,
+ m = string.length,
+ c,
+ parse;
+
+ while (i < n) {
+ if (j >= m) return -1;
+ c = specifier.charCodeAt(i++);
+ if (c === 37) {
+ c = specifier.charAt(i++);
+ parse = parses[c in pads ? specifier.charAt(i++) : c];
+ if (!parse || ((j = parse(d, string, j)) < 0)) return -1;
+ } else if (c != string.charCodeAt(j++)) {
+ return -1;
+ }
+ }
+
+ return j;
+ }
+
+ function parsePeriod(d, string, i) {
+ var n = periodRe.exec(string.slice(i));
+ return n ? (d.p = periodLookup[n[0].toLowerCase()], i + n[0].length) : -1;
+ }
+
+ function parseShortWeekday(d, string, i) {
+ var n = shortWeekdayRe.exec(string.slice(i));
+ return n ? (d.w = shortWeekdayLookup[n[0].toLowerCase()], i + n[0].length) : -1;
+ }
+
+ function parseWeekday(d, string, i) {
+ var n = weekdayRe.exec(string.slice(i));
+ return n ? (d.w = weekdayLookup[n[0].toLowerCase()], i + n[0].length) : -1;
+ }
+
+ function parseShortMonth(d, string, i) {
+ var n = shortMonthRe.exec(string.slice(i));
+ return n ? (d.m = shortMonthLookup[n[0].toLowerCase()], i + n[0].length) : -1;
+ }
+
+ function parseMonth(d, string, i) {
+ var n = monthRe.exec(string.slice(i));
+ return n ? (d.m = monthLookup[n[0].toLowerCase()], i + n[0].length) : -1;
+ }
+
+ function parseLocaleDateTime(d, string, i) {
+ return parseSpecifier(d, locale_dateTime, string, i);
+ }
+
+ function parseLocaleDate(d, string, i) {
+ return parseSpecifier(d, locale_date, string, i);
+ }
+
+ function parseLocaleTime(d, string, i) {
+ return parseSpecifier(d, locale_time, string, i);
+ }
+
+ function formatShortWeekday(d) {
+ return locale_shortWeekdays[d.getDay()];
+ }
+
+ function formatWeekday(d) {
+ return locale_weekdays[d.getDay()];
+ }
+
+ function formatShortMonth(d) {
+ return locale_shortMonths[d.getMonth()];
+ }
+
+ function formatMonth(d) {
+ return locale_months[d.getMonth()];
+ }
+
+ function formatPeriod(d) {
+ return locale_periods[+(d.getHours() >= 12)];
+ }
+
+ function formatUTCShortWeekday(d) {
+ return locale_shortWeekdays[d.getUTCDay()];
+ }
+
+ function formatUTCWeekday(d) {
+ return locale_weekdays[d.getUTCDay()];
+ }
+
+ function formatUTCShortMonth(d) {
+ return locale_shortMonths[d.getUTCMonth()];
+ }
+
+ function formatUTCMonth(d) {
+ return locale_months[d.getUTCMonth()];
+ }
+
+ function formatUTCPeriod(d) {
+ return locale_periods[+(d.getUTCHours() >= 12)];
+ }
+
+ return {
+ format: function(specifier) {
+ var f = newFormat(specifier += "", formats);
+ f.toString = function() { return specifier; };
+ return f;
+ },
+ parse: function(specifier) {
+ var p = newParse(specifier += "", localDate);
+ p.toString = function() { return specifier; };
+ return p;
+ },
+ utcFormat: function(specifier) {
+ var f = newFormat(specifier += "", utcFormats);
+ f.toString = function() { return specifier; };
+ return f;
+ },
+ utcParse: function(specifier) {
+ // NOTE(review): unlike the three factories above, the specifier is not
+ // coerced to a string here (`specifier += ""` is missing) — confirm
+ // against upstream d3-time-format before relying on non-string input.
+ var p = newParse(specifier, utcDate);
+ p.toString = function() { return specifier; };
+ return p;
+ }
+ };
+}
+
+// Pad modifiers: "-" no pad, "_" space pad, "0" zero pad.
+var pads = {"-": "", "_": " ", "0": "0"};
+var numberRe = /^\s*\d+/; // note: ignores next directive's padding
+var percentRe = /^%/;
+var requoteRe = /[\\^$*+?|[\]().{}]/g;
+
+// Left-pads the decimal representation of value to `width` with `fill`,
+// keeping a leading "-" (if any) in front of the padding.
+function pad(value, fill, width) {
+ var sign = value < 0 ? "-" : "",
+ string = (sign ? -value : value) + "",
+ length = string.length;
+ return sign + (length < width ? new Array(width - length + 1).join(fill) + string : string);
+}
+
+// Escapes regex metacharacters in s.
+function requote(s) {
+ return s.replace(requoteRe, "\\$&");
+}
+
+// Builds a case-insensitive anchored alternation over the given names.
+function formatRe(names) {
+ return new RegExp("^(?:" + names.map(requote).join("|") + ")", "i");
+}
+
+// Lowercased name -> index map for the given names.
+function formatLookup(names) {
+ var map = {}, i = -1, n = names.length;
+ while (++i < n) map[names[i].toLowerCase()] = i;
+ return map;
+}
+
+// Field parsers: each reads a bounded numeric (or literal) prefix of
+// string.slice(i), stores the value in the field record d, and returns the
+// index past the consumed text, or -1 on mismatch.
+function parseWeekdayNumberSunday(d, string, i) {
+ var n = numberRe.exec(string.slice(i, i + 1));
+ return n ? (d.w = +n[0], i + n[0].length) : -1;
+}
+
+function parseWeekdayNumberMonday(d, string, i) {
+ var n = numberRe.exec(string.slice(i, i + 1));
+ return n ? (d.u = +n[0], i + n[0].length) : -1;
+}
+
+function parseWeekNumberSunday(d, string, i) {
+ var n = numberRe.exec(string.slice(i, i + 2));
+ return n ? (d.U = +n[0], i + n[0].length) : -1;
+}
+
+function parseWeekNumberISO(d, string, i) {
+ var n = numberRe.exec(string.slice(i, i + 2));
+ return n ? (d.V = +n[0], i + n[0].length) : -1;
+}
+
+function parseWeekNumberMonday(d, string, i) {
+ var n = numberRe.exec(string.slice(i, i + 2));
+ return n ? (d.W = +n[0], i + n[0].length) : -1;
+}
+
+function parseFullYear(d, string, i) {
+ var n = numberRe.exec(string.slice(i, i + 4));
+ return n ? (d.y = +n[0], i + n[0].length) : -1;
+}
+
+// Two-digit years: 69-99 map to 1969-1999, 00-68 to 2000-2068.
+function parseYear(d, string, i) {
+ var n = numberRe.exec(string.slice(i, i + 2));
+ return n ? (d.y = +n[0] + (+n[0] > 68 ? 1900 : 2000), i + n[0].length) : -1;
+}
+
+// "Z" means UTC (offset 0); otherwise "+hh[:]mm" is stored negated as an
+// integer hhmm via string concatenation (e.g. "+0530" -> -530).
+function parseZone(d, string, i) {
+ var n = /^(Z)|([+-]\d\d)(?::?(\d\d))?/.exec(string.slice(i, i + 6));
+ return n ? (d.Z = n[1] ? 0 : -(n[2] + (n[3] || "00")), i + n[0].length) : -1;
+}
+
+function parseMonthNumber(d, string, i) {
+ var n = numberRe.exec(string.slice(i, i + 2));
+ return n ? (d.m = n[0] - 1, i + n[0].length) : -1;
+}
+
+function parseDayOfMonth(d, string, i) {
+ var n = numberRe.exec(string.slice(i, i + 2));
+ return n ? (d.d = +n[0], i + n[0].length) : -1;
+}
+
+function parseDayOfYear(d, string, i) {
+ var n = numberRe.exec(string.slice(i, i + 3));
+ return n ? (d.m = 0, d.d = +n[0], i + n[0].length) : -1;
+}
+
+function parseHour24(d, string, i) {
+ var n = numberRe.exec(string.slice(i, i + 2));
+ return n ? (d.H = +n[0], i + n[0].length) : -1;
+}
+
+function parseMinutes(d, string, i) {
+ var n = numberRe.exec(string.slice(i, i + 2));
+ return n ? (d.M = +n[0], i + n[0].length) : -1;
+}
+
+function parseSeconds(d, string, i) {
+ var n = numberRe.exec(string.slice(i, i + 2));
+ return n ? (d.S = +n[0], i + n[0].length) : -1;
+}
+
+function parseMilliseconds(d, string, i) {
+ var n = numberRe.exec(string.slice(i, i + 3));
+ return n ? (d.L = +n[0], i + n[0].length) : -1;
+}
+
+// Microseconds are truncated to millisecond precision.
+function parseMicroseconds(d, string, i) {
+ var n = numberRe.exec(string.slice(i, i + 6));
+ return n ? (d.L = Math.floor(n[0] / 1000), i + n[0].length) : -1;
+}
+
+function parseLiteralPercent(d, string, i) {
+ var n = percentRe.exec(string.slice(i, i + 1));
+ return n ? i + n[0].length : -1;
+}
+
+// "%Q": milliseconds since the UNIX epoch.
+function parseUnixTimestamp(d, string, i) {
+ var n = numberRe.exec(string.slice(i));
+ return n ? (d.Q = +n[0], i + n[0].length) : -1;
+}
+
+// "%s": seconds since the UNIX epoch, stored as milliseconds.
+function parseUnixTimestampSeconds(d, string, i) {
+ var n = numberRe.exec(string.slice(i));
+ return n ? (d.Q = (+n[0]) * 1000, i + n[0].length) : -1;
+}
+
+// Local-time field formatters: each takes a Date and the pad character p and
+// returns the rendered field.
+function formatDayOfMonth(d, p) {
+ return pad(d.getDate(), p, 2);
+}
+
+function formatHour24(d, p) {
+ return pad(d.getHours(), p, 2);
+}
+
+function formatHour12(d, p) {
+ return pad(d.getHours() % 12 || 12, p, 2);
+}
+
+function formatDayOfYear(d, p) {
+ return pad(1 + day.count(year(d), d), p, 3);
+}
+
+function formatMilliseconds(d, p) {
+ return pad(d.getMilliseconds(), p, 3);
+}
+
+// "%f": millisecond precision only; the trailing microseconds are zeros.
+function formatMicroseconds(d, p) {
+ return formatMilliseconds(d, p) + "000";
+}
+
+function formatMonthNumber(d, p) {
+ return pad(d.getMonth() + 1, p, 2);
+}
+
+function formatMinutes(d, p) {
+ return pad(d.getMinutes(), p, 2);
+}
+
+function formatSeconds(d, p) {
+ return pad(d.getSeconds(), p, 2);
+}
+
+// ISO weekday number: Monday = 1 .. Sunday = 7.
+function formatWeekdayNumberMonday(d) {
+ var day$$1 = d.getDay();
+ return day$$1 === 0 ? 7 : day$$1;
+}
+
+function formatWeekNumberSunday(d, p) {
+ return pad(sunday.count(year(d), d), p, 2);
+}
+
+// ISO 8601 week number: weeks start Monday; week 1 contains the first Thursday.
+function formatWeekNumberISO(d, p) {
+ var day$$1 = d.getDay();
+ d = (day$$1 >= 4 || day$$1 === 0) ? thursday(d) : thursday.ceil(d);
+ return pad(thursday.count(year(d), d) + (year(d).getDay() === 4), p, 2);
+}
+
+function formatWeekdayNumberSunday(d) {
+ return d.getDay();
+}
+
+function formatWeekNumberMonday(d, p) {
+ return pad(monday.count(year(d), d), p, 2);
+}
+
+function formatYear(d, p) {
+ return pad(d.getFullYear() % 100, p, 2);
+}
+
+function formatFullYear(d, p) {
+ return pad(d.getFullYear() % 10000, p, 4);
+}
+
+// "%Z": numeric timezone offset, e.g. "-0700"; getTimezoneOffset is
+// minutes west of UTC, hence the inverted sign.
+function formatZone(d) {
+ var z = d.getTimezoneOffset();
+ return (z > 0 ? "-" : (z *= -1, "+"))
+ + pad(z / 60 | 0, "0", 2)
+ + pad(z % 60, "0", 2);
+}
+
+// UTC field formatters, mirroring the local ones above.
+function formatUTCDayOfMonth(d, p) {
+ return pad(d.getUTCDate(), p, 2);
+}
+
+function formatUTCHour24(d, p) {
+ return pad(d.getUTCHours(), p, 2);
+}
+
+function formatUTCHour12(d, p) {
+ return pad(d.getUTCHours() % 12 || 12, p, 2);
+}
+
+function formatUTCDayOfYear(d, p) {
+ return pad(1 + utcDay.count(utcYear(d), d), p, 3);
+}
+
+function formatUTCMilliseconds(d, p) {
+ return pad(d.getUTCMilliseconds(), p, 3);
+}
+
+// "%f": millisecond precision only; the trailing microseconds are zeros.
+function formatUTCMicroseconds(d, p) {
+ return formatUTCMilliseconds(d, p) + "000";
+}
+
+function formatUTCMonthNumber(d, p) {
+ return pad(d.getUTCMonth() + 1, p, 2);
+}
+
+function formatUTCMinutes(d, p) {
+ return pad(d.getUTCMinutes(), p, 2);
+}
+
+function formatUTCSeconds(d, p) {
+ return pad(d.getUTCSeconds(), p, 2);
+}
+
+// ISO weekday number: Monday = 1 .. Sunday = 7.
+function formatUTCWeekdayNumberMonday(d) {
+ var dow = d.getUTCDay();
+ return dow === 0 ? 7 : dow;
+}
+
+function formatUTCWeekNumberSunday(d, p) {
+ return pad(utcSunday.count(utcYear(d), d), p, 2);
+}
+
+// ISO 8601 week number in UTC.
+function formatUTCWeekNumberISO(d, p) {
+ var day$$1 = d.getUTCDay();
+ d = (day$$1 >= 4 || day$$1 === 0) ? utcThursday(d) : utcThursday.ceil(d);
+ return pad(utcThursday.count(utcYear(d), d) + (utcYear(d).getUTCDay() === 4), p, 2);
+}
+
+function formatUTCWeekdayNumberSunday(d) {
+ return d.getUTCDay();
+}
+
+function formatUTCWeekNumberMonday(d, p) {
+ return pad(utcMonday.count(utcYear(d), d), p, 2);
+}
+
+function formatUTCYear(d, p) {
+ return pad(d.getUTCFullYear() % 100, p, 2);
+}
+
+function formatUTCFullYear(d, p) {
+ return pad(d.getUTCFullYear() % 10000, p, 4);
+}
+
+// "%Z" in UTC is always the zero offset.
+function formatUTCZone() {
+ return "+0000";
+}
+
+function formatLiteralPercent() {
+ return "%";
+}
+
+// "%Q": milliseconds since the UNIX epoch.
+function formatUnixTimestamp(d) {
+ return +d;
+}
+
+// "%s": whole seconds since the UNIX epoch.
+function formatUnixTimestampSeconds(d) {
+ return Math.floor(+d / 1000);
+}
+
+var locale$1;
+var timeFormat;
+var timeParse;
+var utcFormat;
+var utcParse;
+
+defaultLocale$1({
+ dateTime: "%x, %X",
+ date: "%-m/%-d/%Y",
+ time: "%-I:%M:%S %p",
+ periods: ["AM", "PM"],
+ days: ["Sunday", "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday"],
+ shortDays: ["Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"],
+ months: ["January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December"],
+ shortMonths: ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"]
+});
+
+function defaultLocale$1(definition) {
+ locale$1 = formatLocale$1(definition);
+ timeFormat = locale$1.format;
+ timeParse = locale$1.parse;
+ utcFormat = locale$1.utcFormat;
+ utcParse = locale$1.utcParse;
+ return locale$1;
+}
+
+var isoSpecifier = "%Y-%m-%dT%H:%M:%S.%LZ";
+
+function formatIsoNative(date) {
+ return date.toISOString();
+}
+
+var formatIso = Date.prototype.toISOString
+ ? formatIsoNative
+ : utcFormat(isoSpecifier);
+
+function parseIsoNative(string) {
+ var date = new Date(string);
+ return isNaN(date) ? null : date;
+}
+
+var parseIso = +new Date("2000-01-01T00:00:00.000Z")
+ ? parseIsoNative
+ : utcParse(isoSpecifier);
+
+var colors = function(s) {
+ return s.match(/.{6}/g).map(function(x) {
+ return "#" + x;
+ });
+};
+
+colors("1f77b4ff7f0e2ca02cd627289467bd8c564be377c27f7f7fbcbd2217becf");
+
+colors("393b795254a36b6ecf9c9ede6379398ca252b5cf6bcedb9c8c6d31bd9e39e7ba52e7cb94843c39ad494ad6616be7969c7b4173a55194ce6dbdde9ed6");
+
+colors("3182bd6baed69ecae1c6dbefe6550dfd8d3cfdae6bfdd0a231a35474c476a1d99bc7e9c0756bb19e9ac8bcbddcdadaeb636363969696bdbdbdd9d9d9");
+
+colors("1f77b4aec7e8ff7f0effbb782ca02c98df8ad62728ff98969467bdc5b0d58c564bc49c94e377c2f7b6d27f7f7fc7c7c7bcbd22dbdb8d17becf9edae5");
+
+cubehelixLong(cubehelix(300, 0.5, 0.0), cubehelix(-240, 0.5, 1.0));
+
+var warm = cubehelixLong(cubehelix(-100, 0.75, 0.35), cubehelix(80, 1.50, 0.8));
+
+var cool = cubehelixLong(cubehelix(260, 0.75, 0.35), cubehelix(80, 1.50, 0.8));
+
+var rainbow = cubehelix();
+
+function ramp(range) {
+ var n = range.length;
+ return function(t) {
+ return range[Math.max(0, Math.min(n - 1, Math.floor(t * n)))];
+ };
+}
+
+ramp(colors("44015444025645045745055946075a46085c460a5d460b5e470d60470e6147106347116447136548146748166848176948186a481a6c481b6d481c6e481d6f481f70482071482173482374482475482576482677482878482979472a7a472c7a472d7b472e7c472f7d46307e46327e46337f463480453581453781453882443983443a83443b84433d84433e85423f854240864241864142874144874045884046883f47883f48893e49893e4a893e4c8a3d4d8a3d4e8a3c4f8a3c508b3b518b3b528b3a538b3a548c39558c39568c38588c38598c375a8c375b8d365c8d365d8d355e8d355f8d34608d34618d33628d33638d32648e32658e31668e31678e31688e30698e306a8e2f6b8e2f6c8e2e6d8e2e6e8e2e6f8e2d708e2d718e2c718e2c728e2c738e2b748e2b758e2a768e2a778e2a788e29798e297a8e297b8e287c8e287d8e277e8e277f8e27808e26818e26828e26828e25838e25848e25858e24868e24878e23888e23898e238a8d228b8d228c8d228d8d218e8d218f8d21908d21918c20928c20928c20938c1f948c1f958b1f968b1f978b1f988b1f998a1f9a8a1e9b8a1e9c891e9d891f9e891f9f881fa0881fa1881fa1871fa28720a38620a48621a58521a68522a78522a88423a98324aa8325ab8225ac8226ad8127ad8128ae8029af7f2ab07f2cb17e2db27d2eb37c2fb47c31b57b32b67a34b67935b77937b87838b9773aba763bbb753dbc743fbc7340bd7242be7144bf7046c06f48c16e4ac16d4cc26c4ec36b50c46a52c56954c56856c66758c7655ac8645cc8635ec96260ca6063cb5f65cb5e67cc5c69cd5b6ccd5a6ece5870cf5773d05675d05477d1537ad1517cd2507fd34e81d34d84d44b86d54989d5488bd6468ed64590d74393d74195d84098d83e9bd93c9dd93ba0da39a2da37a5db36a8db34aadc32addc30b0dd2fb2dd2db5de2bb8de29bade28bddf26c0df25c2df23c5e021c8e020cae11fcde11dd0e11cd2e21bd5e21ad8e219dae319dde318dfe318e2e418e5e419e7e419eae51aece51befe51cf1e51df4e61ef6e620f8e621fbe723fde725"));
+
+var magma = ramp(colors("00000401000501010601010802010902020b02020d03030f03031204041405041606051806051a07061c08071e0907200a08220b09240c09260d0a290e0b2b100b2d110c2f120d31130d34140e36150e38160f3b180f3d19103f1a10421c10441d11471e114920114b21114e22115024125325125527125829115a2a115c2c115f2d11612f116331116533106734106936106b38106c390f6e3b0f703d0f713f0f72400f74420f75440f764510774710784910784a10794c117a4e117b4f127b51127c52137c54137d56147d57157e59157e5a167e5c167f5d177f5f187f601880621980641a80651a80671b80681c816a1c816b1d816d1d816e1e81701f81721f817320817521817621817822817922827b23827c23827e24828025828125818326818426818627818827818928818b29818c29818e2a81902a81912b81932b80942c80962c80982d80992d809b2e7f9c2e7f9e2f7fa02f7fa1307ea3307ea5317ea6317da8327daa337dab337cad347cae347bb0357bb2357bb3367ab5367ab73779b83779ba3878bc3978bd3977bf3a77c03a76c23b75c43c75c53c74c73d73c83e73ca3e72cc3f71cd4071cf4070d0416fd2426fd3436ed5446dd6456cd8456cd9466bdb476adc4869de4968df4a68e04c67e24d66e34e65e44f64e55064e75263e85362e95462ea5661eb5760ec5860ed5a5fee5b5eef5d5ef05f5ef1605df2625df2645cf3655cf4675cf4695cf56b5cf66c5cf66e5cf7705cf7725cf8745cf8765cf9785df9795df97b5dfa7d5efa7f5efa815ffb835ffb8560fb8761fc8961fc8a62fc8c63fc8e64fc9065fd9266fd9467fd9668fd9869fd9a6afd9b6bfe9d6cfe9f6dfea16efea36ffea571fea772fea973feaa74feac76feae77feb078feb27afeb47bfeb67cfeb77efeb97ffebb81febd82febf84fec185fec287fec488fec68afec88cfeca8dfecc8ffecd90fecf92fed194fed395fed597fed799fed89afdda9cfddc9efddea0fde0a1fde2a3fde3a5fde5a7fde7a9fde9aafdebacfcecaefceeb0fcf0b2fcf2b4fcf4b6fcf6b8fcf7b9fcf9bbfcfbbdfcfdbf"));
+
+var inferno = ramp(colors("00000401000501010601010802010a02020c02020e03021004031204031405041706041907051b08051d09061f0a07220b07240c08260d08290e092b10092d110a30120a32140b34150b37160b39180c3c190c3e1b0c411c0c431e0c451f0c48210c4a230c4c240c4f260c51280b53290b552b0b572d0b592f0a5b310a5c320a5e340a5f3609613809623909633b09643d09653e0966400a67420a68440a68450a69470b6a490b6a4a0c6b4c0c6b4d0d6c4f0d6c510e6c520e6d540f6d550f6d57106e59106e5a116e5c126e5d126e5f136e61136e62146e64156e65156e67166e69166e6a176e6c186e6d186e6f196e71196e721a6e741a6e751b6e771c6d781c6d7a1d6d7c1d6d7d1e6d7f1e6c801f6c82206c84206b85216b87216b88226a8a226a8c23698d23698f24699025689225689326679526679727669827669a28659b29649d29649f2a63a02a63a22b62a32c61a52c60a62d60a82e5fa92e5eab2f5ead305dae305cb0315bb1325ab3325ab43359b63458b73557b93556ba3655bc3754bd3853bf3952c03a51c13a50c33b4fc43c4ec63d4dc73e4cc83f4bca404acb4149cc4248ce4347cf4446d04545d24644d34743d44842d54a41d74b3fd84c3ed94d3dda4e3cdb503bdd513ade5238df5337e05536e15635e25734e35933e45a31e55c30e65d2fe75e2ee8602de9612bea632aeb6429eb6628ec6726ed6925ee6a24ef6c23ef6e21f06f20f1711ff1731df2741cf3761bf37819f47918f57b17f57d15f67e14f68013f78212f78410f8850ff8870ef8890cf98b0bf98c0af98e09fa9008fa9207fa9407fb9606fb9706fb9906fb9b06fb9d07fc9f07fca108fca309fca50afca60cfca80dfcaa0ffcac11fcae12fcb014fcb216fcb418fbb61afbb81dfbba1ffbbc21fbbe23fac026fac228fac42afac62df9c72ff9c932f9cb35f8cd37f8cf3af7d13df7d340f6d543f6d746f5d949f5db4cf4dd4ff4df53f4e156f3e35af3e55df2e661f2e865f2ea69f1ec6df1ed71f1ef75f1f179f2f27df2f482f3f586f3f68af4f88ef5f992f6fa96f8fb9af9fc9dfafda1fcffa4"));
+
+var plasma = ramp(colors("0d088710078813078916078a19068c1b068d1d068e20068f2206902406912605912805922a05932c05942e05952f059631059733059735049837049938049a3a049a3c049b3e049c3f049c41049d43039e44039e46039f48039f4903a04b03a14c02a14e02a25002a25102a35302a35502a45601a45801a45901a55b01a55c01a65e01a66001a66100a76300a76400a76600a76700a86900a86a00a86c00a86e00a86f00a87100a87201a87401a87501a87701a87801a87a02a87b02a87d03a87e03a88004a88104a78305a78405a78606a68707a68808a68a09a58b0aa58d0ba58e0ca48f0da4910ea3920fa39410a29511a19613a19814a099159f9a169f9c179e9d189d9e199da01a9ca11b9ba21d9aa31e9aa51f99a62098a72197a82296aa2395ab2494ac2694ad2793ae2892b02991b12a90b22b8fb32c8eb42e8db52f8cb6308bb7318ab83289ba3388bb3488bc3587bd3786be3885bf3984c03a83c13b82c23c81c33d80c43e7fc5407ec6417dc7427cc8437bc9447aca457acb4679cc4778cc4977cd4a76ce4b75cf4c74d04d73d14e72d24f71d35171d45270d5536fd5546ed6556dd7566cd8576bd9586ada5a6ada5b69db5c68dc5d67dd5e66de5f65de6164df6263e06363e16462e26561e26660e3685fe4695ee56a5de56b5de66c5ce76e5be76f5ae87059e97158e97257ea7457eb7556eb7655ec7754ed7953ed7a52ee7b51ef7c51ef7e50f07f4ff0804ef1814df1834cf2844bf3854bf3874af48849f48948f58b47f58c46f68d45f68f44f79044f79143f79342f89441f89540f9973ff9983ef99a3efa9b3dfa9c3cfa9e3bfb9f3afba139fba238fca338fca537fca636fca835fca934fdab33fdac33fdae32fdaf31fdb130fdb22ffdb42ffdb52efeb72dfeb82cfeba2cfebb2bfebd2afebe2afec029fdc229fdc328fdc527fdc627fdc827fdca26fdcb26fccd25fcce25fcd025fcd225fbd324fbd524fbd724fad824fada24f9dc24f9dd25f8df25f8e125f7e225f7e425f6e626f6e826f5e926f5eb27f4ed27f3ee27f3f027f2f227f1f426f1f525f0f724f0f921"));
+
+function cubicInOut(t) {
+ return ((t *= 2) <= 1 ? t * t * t : (t -= 2) * t * t + 2) / 2;
+}
+
+var pi = Math.PI;
+
+var tau = 2 * Math.PI;
+
+var noop = {value: function() {}};
+
+function dispatch() {
+ for (var i = 0, n = arguments.length, _ = {}, t; i < n; ++i) {
+ if (!(t = arguments[i] + "") || (t in _)) throw new Error("illegal type: " + t);
+ _[t] = [];
+ }
+ return new Dispatch(_);
+}
+
+function Dispatch(_) {
+ this._ = _;
+}
+
+function parseTypenames$1(typenames, types) {
+ return typenames.trim().split(/^|\s+/).map(function(t) {
+ var name = "", i = t.indexOf(".");
+ if (i >= 0) name = t.slice(i + 1), t = t.slice(0, i);
+ if (t && !types.hasOwnProperty(t)) throw new Error("unknown type: " + t);
+ return {type: t, name: name};
+ });
+}
+
+Dispatch.prototype = dispatch.prototype = {
+ constructor: Dispatch,
+ on: function(typename, callback) {
+ var _ = this._,
+ T = parseTypenames$1(typename + "", _),
+ t,
+ i = -1,
+ n = T.length;
+
+ // If no callback was specified, return the callback of the given type and name.
+ if (arguments.length < 2) {
+ while (++i < n) if ((t = (typename = T[i]).type) && (t = get$1(_[t], typename.name))) return t;
+ return;
+ }
+
+ // If a type was specified, set the callback for the given type and name.
+ // Otherwise, if a null callback was specified, remove callbacks of the given name.
+ if (callback != null && typeof callback !== "function") throw new Error("invalid callback: " + callback);
+ while (++i < n) {
+ if (t = (typename = T[i]).type) _[t] = set$3(_[t], typename.name, callback);
+ else if (callback == null) for (t in _) _[t] = set$3(_[t], typename.name, null);
+ }
+
+ return this;
+ },
+ copy: function() {
+ var copy = {}, _ = this._;
+ for (var t in _) copy[t] = _[t].slice();
+ return new Dispatch(copy);
+ },
+ call: function(type, that) {
+ if ((n = arguments.length - 2) > 0) for (var args = new Array(n), i = 0, n, t; i < n; ++i) args[i] = arguments[i + 2];
+ if (!this._.hasOwnProperty(type)) throw new Error("unknown type: " + type);
+ for (t = this._[type], i = 0, n = t.length; i < n; ++i) t[i].value.apply(that, args);
+ },
+ apply: function(type, that, args) {
+ if (!this._.hasOwnProperty(type)) throw new Error("unknown type: " + type);
+ for (var t = this._[type], i = 0, n = t.length; i < n; ++i) t[i].value.apply(that, args);
+ }
+};
+
+function get$1(type, name) {
+ for (var i = 0, n = type.length, c; i < n; ++i) {
+ if ((c = type[i]).name === name) {
+ return c.value;
+ }
+ }
+}
+
+function set$3(type, name, callback) {
+ for (var i = 0, n = type.length; i < n; ++i) {
+ if (type[i].name === name) {
+ type[i] = noop, type = type.slice(0, i).concat(type.slice(i + 1));
+ break;
+ }
+ }
+ if (callback != null) type.push({name: name, value: callback});
+ return type;
+}
+
+var frame = 0;
+var timeout = 0;
+var interval = 0;
+var pokeDelay = 1000;
+var taskHead;
+var taskTail;
+var clockLast = 0;
+var clockNow = 0;
+var clockSkew = 0;
+var clock = typeof performance === "object" && performance.now ? performance : Date;
+var setFrame = typeof window === "object" && window.requestAnimationFrame ? window.requestAnimationFrame.bind(window) : function(f) { setTimeout(f, 17); };
+
+function now() {
+ return clockNow || (setFrame(clearNow), clockNow = clock.now() + clockSkew);
+}
+
+function clearNow() {
+ clockNow = 0;
+}
+
+function Timer() {
+ this._call =
+ this._time =
+ this._next = null;
+}
+
+Timer.prototype = timer.prototype = {
+ constructor: Timer,
+ restart: function(callback, delay, time) {
+ if (typeof callback !== "function") throw new TypeError("callback is not a function");
+ time = (time == null ? now() : +time) + (delay == null ? 0 : +delay);
+ if (!this._next && taskTail !== this) {
+ if (taskTail) taskTail._next = this;
+ else taskHead = this;
+ taskTail = this;
+ }
+ this._call = callback;
+ this._time = time;
+ sleep();
+ },
+ stop: function() {
+ if (this._call) {
+ this._call = null;
+ this._time = Infinity;
+ sleep();
+ }
+ }
+};
+
+function timer(callback, delay, time) {
+ var t = new Timer;
+ t.restart(callback, delay, time);
+ return t;
+}
+
+function timerFlush() {
+ now(); // Get the current time, if not already set.
+ ++frame; // Pretend we’ve set an alarm, if we haven’t already.
+ var t = taskHead, e;
+ while (t) {
+ if ((e = clockNow - t._time) >= 0) t._call.call(null, e);
+ t = t._next;
+ }
+ --frame;
+}
+
+function wake() {
+ clockNow = (clockLast = clock.now()) + clockSkew;
+ frame = timeout = 0;
+ try {
+ timerFlush();
+ } finally {
+ frame = 0;
+ nap();
+ clockNow = 0;
+ }
+}
+
+function poke() {
+ var now = clock.now(), delay = now - clockLast;
+ if (delay > pokeDelay) clockSkew -= delay, clockLast = now;
+}
+
+function nap() {
+ var t0, t1 = taskHead, t2, time = Infinity;
+ while (t1) {
+ if (t1._call) {
+ if (time > t1._time) time = t1._time;
+ t0 = t1, t1 = t1._next;
+ } else {
+ t2 = t1._next, t1._next = null;
+ t1 = t0 ? t0._next = t2 : taskHead = t2;
+ }
+ }
+ taskTail = t0;
+ sleep(time);
+}
+
+function sleep(time) {
+ if (frame) return; // Soonest alarm already set, or will be.
+ if (timeout) timeout = clearTimeout(timeout);
+ var delay = time - clockNow; // Strictly less than if we recomputed clockNow.
+ if (delay > 24) {
+ if (time < Infinity) timeout = setTimeout(wake, time - clock.now() - clockSkew);
+ if (interval) interval = clearInterval(interval);
+ } else {
+ if (!interval) clockLast = clock.now(), interval = setInterval(poke, pokeDelay);
+ frame = 1, setFrame(wake);
+ }
+}
+
+var timeout$1 = function(callback, delay, time) {
+ var t = new Timer;
+ delay = delay == null ? 0 : +delay;
+ t.restart(function(elapsed) {
+ t.stop();
+ callback(elapsed + delay);
+ }, delay, time);
+ return t;
+};
+
+var emptyOn = dispatch("start", "end", "interrupt");
+var emptyTween = [];
+
+var CREATED = 0;
+var SCHEDULED = 1;
+var STARTING = 2;
+var STARTED = 3;
+var RUNNING = 4;
+var ENDING = 5;
+var ENDED = 6;
+
+var schedule = function(node, name, id, index, group, timing) {
+ var schedules = node.__transition;
+ if (!schedules) node.__transition = {};
+ else if (id in schedules) return;
+ create(node, id, {
+ name: name,
+ index: index, // For context during callback.
+ group: group, // For context during callback.
+ on: emptyOn,
+ tween: emptyTween,
+ time: timing.time,
+ delay: timing.delay,
+ duration: timing.duration,
+ ease: timing.ease,
+ timer: null,
+ state: CREATED
+ });
+};
+
+function init(node, id) {
+ var schedule = node.__transition;
+ if (!schedule || !(schedule = schedule[id]) || schedule.state > CREATED) throw new Error("too late");
+ return schedule;
+}
+
+function set$2(node, id) {
+ var schedule = node.__transition;
+ if (!schedule || !(schedule = schedule[id]) || schedule.state > STARTING) throw new Error("too late");
+ return schedule;
+}
+
+function get(node, id) {
+ var schedule = node.__transition;
+ if (!schedule || !(schedule = schedule[id])) throw new Error("too late");
+ return schedule;
+}
+
+function create(node, id, self) {
+ var schedules = node.__transition,
+ tween;
+
+ // Initialize the self timer when the transition is created.
+ // Note the actual delay is not known until the first callback!
+ schedules[id] = self;
+ self.timer = timer(schedule, 0, self.time);
+
+ function schedule(elapsed) {
+ self.state = SCHEDULED;
+ self.timer.restart(start, self.delay, self.time);
+
+ // If the elapsed delay is less than our first sleep, start immediately.
+ if (self.delay <= elapsed) start(elapsed - self.delay);
+ }
+
+ function start(elapsed) {
+ var i, j, n, o;
+
+ // If the state is not SCHEDULED, then we previously errored on start.
+ if (self.state !== SCHEDULED) return stop();
+
+ for (i in schedules) {
+ o = schedules[i];
+ if (o.name !== self.name) continue;
+
+ // While this element already has a starting transition during this frame,
+ // defer starting an interrupting transition until that transition has a
+ // chance to tick (and possibly end); see d3/d3-transition#54!
+ if (o.state === STARTED) return timeout$1(start);
+
+ // Interrupt the active transition, if any.
+ // Dispatch the interrupt event.
+ if (o.state === RUNNING) {
+ o.state = ENDED;
+ o.timer.stop();
+ o.on.call("interrupt", node, node.__data__, o.index, o.group);
+ delete schedules[i];
+ }
+
+ // Cancel any pre-empted transitions. No interrupt event is dispatched
+ // because the cancelled transitions never started. Note that this also
+ // removes this transition from the pending list!
+ else if (+i < id) {
+ o.state = ENDED;
+ o.timer.stop();
+ delete schedules[i];
+ }
+ }
+
+ // Defer the first tick to end of the current frame; see d3/d3#1576.
+ // Note the transition may be canceled after start and before the first tick!
+ // Note this must be scheduled before the start event; see d3/d3-transition#16!
+ // Assuming this is successful, subsequent callbacks go straight to tick.
+ timeout$1(function() {
+ if (self.state === STARTED) {
+ self.state = RUNNING;
+ self.timer.restart(tick, self.delay, self.time);
+ tick(elapsed);
+ }
+ });
+
+ // Dispatch the start event.
+ // Note this must be done before the tween are initialized.
+ self.state = STARTING;
+ self.on.call("start", node, node.__data__, self.index, self.group);
+ if (self.state !== STARTING) return; // interrupted
+ self.state = STARTED;
+
+ // Initialize the tween, deleting null tween.
+ tween = new Array(n = self.tween.length);
+ for (i = 0, j = -1; i < n; ++i) {
+ if (o = self.tween[i].value.call(node, node.__data__, self.index, self.group)) {
+ tween[++j] = o;
+ }
+ }
+ tween.length = j + 1;
+ }
+
+ function tick(elapsed) {
+ var t = elapsed < self.duration ? self.ease.call(null, elapsed / self.duration) : (self.timer.restart(stop), self.state = ENDING, 1),
+ i = -1,
+ n = tween.length;
+
+ while (++i < n) {
+ tween[i].call(null, t);
+ }
+
+ // Dispatch the end event.
+ if (self.state === ENDING) {
+ self.on.call("end", node, node.__data__, self.index, self.group);
+ stop();
+ }
+ }
+
+ function stop() {
+ self.state = ENDED;
+ self.timer.stop();
+ delete schedules[id];
+ for (var i in schedules) return; // eslint-disable-line no-unused-vars
+ delete node.__transition;
+ }
+}
+
+var interrupt = function(node, name) {
+ var schedules = node.__transition,
+ schedule$$1,
+ active,
+ empty = true,
+ i;
+
+ if (!schedules) return;
+
+ name = name == null ? null : name + "";
+
+ for (i in schedules) {
+ if ((schedule$$1 = schedules[i]).name !== name) { empty = false; continue; }
+ active = schedule$$1.state > STARTING && schedule$$1.state < ENDING;
+ schedule$$1.state = ENDED;
+ schedule$$1.timer.stop();
+ if (active) schedule$$1.on.call("interrupt", node, node.__data__, schedule$$1.index, schedule$$1.group);
+ delete schedules[i];
+ }
+
+ if (empty) delete node.__transition;
+};
+
+var selection_interrupt = function(name) {
+ return this.each(function() {
+ interrupt(this, name);
+ });
+};
+
+function tweenRemove(id, name) {
+ var tween0, tween1;
+ return function() {
+ var schedule$$1 = set$2(this, id),
+ tween = schedule$$1.tween;
+
+ // If this node shared tween with the previous node,
+ // just assign the updated shared tween and we’re done!
+ // Otherwise, copy-on-write.
+ if (tween !== tween0) {
+ tween1 = tween0 = tween;
+ for (var i = 0, n = tween1.length; i < n; ++i) {
+ if (tween1[i].name === name) {
+ tween1 = tween1.slice();
+ tween1.splice(i, 1);
+ break;
+ }
+ }
+ }
+
+ schedule$$1.tween = tween1;
+ };
+}
+
+function tweenFunction(id, name, value) {
+ var tween0, tween1;
+ if (typeof value !== "function") throw new Error;
+ return function() {
+ var schedule$$1 = set$2(this, id),
+ tween = schedule$$1.tween;
+
+ // If this node shared tween with the previous node,
+ // just assign the updated shared tween and we’re done!
+ // Otherwise, copy-on-write.
+ if (tween !== tween0) {
+ tween1 = (tween0 = tween).slice();
+ for (var t = {name: name, value: value}, i = 0, n = tween1.length; i < n; ++i) {
+ if (tween1[i].name === name) {
+ tween1[i] = t;
+ break;
+ }
+ }
+ if (i === n) tween1.push(t);
+ }
+
+ schedule$$1.tween = tween1;
+ };
+}
+
+var transition_tween = function(name, value) {
+ var id = this._id;
+
+ name += "";
+
+ if (arguments.length < 2) {
+ var tween = get(this.node(), id).tween;
+ for (var i = 0, n = tween.length, t; i < n; ++i) {
+ if ((t = tween[i]).name === name) {
+ return t.value;
+ }
+ }
+ return null;
+ }
+
+ return this.each((value == null ? tweenRemove : tweenFunction)(id, name, value));
+};
+
+function tweenValue(transition, name, value) {
+ var id = transition._id;
+
+ transition.each(function() {
+ var schedule$$1 = set$2(this, id);
+ (schedule$$1.value || (schedule$$1.value = {}))[name] = value.apply(this, arguments);
+ });
+
+ return function(node) {
+ return get(node, id).value[name];
+ };
+}
+
+var interpolate = function(a, b) {
+ var c;
+ return (typeof b === "number" ? interpolateNumber
+ : b instanceof color ? interpolateRgb
+ : (c = color(b)) ? (b = c, interpolateRgb)
+ : interpolateString)(a, b);
+};
+
+function attrRemove$1(name) {
+ return function() {
+ this.removeAttribute(name);
+ };
+}
+
+function attrRemoveNS$1(fullname) {
+ return function() {
+ this.removeAttributeNS(fullname.space, fullname.local);
+ };
+}
+
+function attrConstant$1(name, interpolate$$1, value1) {
+ var value00,
+ interpolate0;
+ return function() {
+ var value0 = this.getAttribute(name);
+ return value0 === value1 ? null
+ : value0 === value00 ? interpolate0
+ : interpolate0 = interpolate$$1(value00 = value0, value1);
+ };
+}
+
+function attrConstantNS$1(fullname, interpolate$$1, value1) {
+ var value00,
+ interpolate0;
+ return function() {
+ var value0 = this.getAttributeNS(fullname.space, fullname.local);
+ return value0 === value1 ? null
+ : value0 === value00 ? interpolate0
+ : interpolate0 = interpolate$$1(value00 = value0, value1);
+ };
+}
+
+function attrFunction$1(name, interpolate$$1, value) {
+ var value00,
+ value10,
+ interpolate0;
+ return function() {
+ var value0, value1 = value(this);
+ if (value1 == null) return void this.removeAttribute(name);
+ value0 = this.getAttribute(name);
+ return value0 === value1 ? null
+ : value0 === value00 && value1 === value10 ? interpolate0
+ : interpolate0 = interpolate$$1(value00 = value0, value10 = value1);
+ };
+}
+
+function attrFunctionNS$1(fullname, interpolate$$1, value) {
+ var value00,
+ value10,
+ interpolate0;
+ return function() {
+ var value0, value1 = value(this);
+ if (value1 == null) return void this.removeAttributeNS(fullname.space, fullname.local);
+ value0 = this.getAttributeNS(fullname.space, fullname.local);
+ return value0 === value1 ? null
+ : value0 === value00 && value1 === value10 ? interpolate0
+ : interpolate0 = interpolate$$1(value00 = value0, value10 = value1);
+ };
+}
+
+var transition_attr = function(name, value) {
+ var fullname = namespace(name), i = fullname === "transform" ? interpolateTransformSvg : interpolate;
+ return this.attrTween(name, typeof value === "function"
+ ? (fullname.local ? attrFunctionNS$1 : attrFunction$1)(fullname, i, tweenValue(this, "attr." + name, value))
+ : value == null ? (fullname.local ? attrRemoveNS$1 : attrRemove$1)(fullname)
+ : (fullname.local ? attrConstantNS$1 : attrConstant$1)(fullname, i, value + ""));
+};
+
+function attrTweenNS(fullname, value) {
+ function tween() {
+ var node = this, i = value.apply(node, arguments);
+ return i && function(t) {
+ node.setAttributeNS(fullname.space, fullname.local, i(t));
+ };
+ }
+ tween._value = value;
+ return tween;
+}
+
+function attrTween(name, value) {
+ function tween() {
+ var node = this, i = value.apply(node, arguments);
+ return i && function(t) {
+ node.setAttribute(name, i(t));
+ };
+ }
+ tween._value = value;
+ return tween;
+}
+
+var transition_attrTween = function(name, value) {
+ var key = "attr." + name;
+ if (arguments.length < 2) return (key = this.tween(key)) && key._value;
+ if (value == null) return this.tween(key, null);
+ if (typeof value !== "function") throw new Error;
+ var fullname = namespace(name);
+ return this.tween(key, (fullname.local ? attrTweenNS : attrTween)(fullname, value));
+};
+
+function delayFunction(id, value) {
+ return function() {
+ init(this, id).delay = +value.apply(this, arguments);
+ };
+}
+
+function delayConstant(id, value) {
+ return value = +value, function() {
+ init(this, id).delay = value;
+ };
+}
+
+var transition_delay = function(value) {
+ var id = this._id;
+
+ return arguments.length
+ ? this.each((typeof value === "function"
+ ? delayFunction
+ : delayConstant)(id, value))
+ : get(this.node(), id).delay;
+};
+
+function durationFunction(id, value) {
+ return function() {
+ set$2(this, id).duration = +value.apply(this, arguments);
+ };
+}
+
+function durationConstant(id, value) {
+ return value = +value, function() {
+ set$2(this, id).duration = value;
+ };
+}
+
+var transition_duration = function(value) {
+ var id = this._id;
+
+ return arguments.length
+ ? this.each((typeof value === "function"
+ ? durationFunction
+ : durationConstant)(id, value))
+ : get(this.node(), id).duration;
+};
+
+function easeConstant(id, value) {
+ if (typeof value !== "function") throw new Error;
+ return function() {
+ set$2(this, id).ease = value;
+ };
+}
+
+var transition_ease = function(value) {
+ var id = this._id;
+
+ return arguments.length
+ ? this.each(easeConstant(id, value))
+ : get(this.node(), id).ease;
+};
+
+var transition_filter = function(match) {
+ if (typeof match !== "function") match = matcher$1(match);
+
+ for (var groups = this._groups, m = groups.length, subgroups = new Array(m), j = 0; j < m; ++j) {
+ for (var group = groups[j], n = group.length, subgroup = subgroups[j] = [], node, i = 0; i < n; ++i) {
+ if ((node = group[i]) && match.call(node, node.__data__, i, group)) {
+ subgroup.push(node);
+ }
+ }
+ }
+
+ return new Transition(subgroups, this._parents, this._name, this._id);
+};
+
+var transition_merge = function(transition$$1) {
+ if (transition$$1._id !== this._id) throw new Error;
+
+ for (var groups0 = this._groups, groups1 = transition$$1._groups, m0 = groups0.length, m1 = groups1.length, m = Math.min(m0, m1), merges = new Array(m0), j = 0; j < m; ++j) {
+ for (var group0 = groups0[j], group1 = groups1[j], n = group0.length, merge = merges[j] = new Array(n), node, i = 0; i < n; ++i) {
+ if (node = group0[i] || group1[i]) {
+ merge[i] = node;
+ }
+ }
+ }
+
+ for (; j < m0; ++j) {
+ merges[j] = groups0[j];
+ }
+
+ return new Transition(merges, this._parents, this._name, this._id);
+};
+
+function start(name) {
+ return (name + "").trim().split(/^|\s+/).every(function(t) {
+ var i = t.indexOf(".");
+ if (i >= 0) t = t.slice(0, i);
+ return !t || t === "start";
+ });
+}
+
+function onFunction(id, name, listener) {
+ var on0, on1, sit = start(name) ? init : set$2;
+ return function() {
+ var schedule$$1 = sit(this, id),
+ on = schedule$$1.on;
+
+ // If this node shared a dispatch with the previous node,
+ // just assign the updated shared dispatch and we’re done!
+ // Otherwise, copy-on-write.
+ if (on !== on0) (on1 = (on0 = on).copy()).on(name, listener);
+
+ schedule$$1.on = on1;
+ };
+}
+
+var transition_on = function(name, listener) {
+ var id = this._id;
+
+ return arguments.length < 2
+ ? get(this.node(), id).on.on(name)
+ : this.each(onFunction(id, name, listener));
+};
+
+function removeFunction(id) {
+ return function() {
+ var parent = this.parentNode;
+ for (var i in this.__transition) if (+i !== id) return;
+ if (parent) parent.removeChild(this);
+ };
+}
+
+var transition_remove = function() {
+ return this.on("end.remove", removeFunction(this._id));
+};
+
+var transition_select = function(select) {
+ var name = this._name,
+ id = this._id;
+
+ if (typeof select !== "function") select = selector(select);
+
+ for (var groups = this._groups, m = groups.length, subgroups = new Array(m), j = 0; j < m; ++j) {
+ for (var group = groups[j], n = group.length, subgroup = subgroups[j] = new Array(n), node, subnode, i = 0; i < n; ++i) {
+ if ((node = group[i]) && (subnode = select.call(node, node.__data__, i, group))) {
+ if ("__data__" in node) subnode.__data__ = node.__data__;
+ subgroup[i] = subnode;
+ schedule(subgroup[i], name, id, i, subgroup, get(node, id));
+ }
+ }
+ }
+
+ return new Transition(subgroups, this._parents, name, id);
+};
+
+var transition_selectAll = function(select) {
+ var name = this._name,
+ id = this._id;
+
+ if (typeof select !== "function") select = selectorAll(select);
+
+ for (var groups = this._groups, m = groups.length, subgroups = [], parents = [], j = 0; j < m; ++j) {
+ for (var group = groups[j], n = group.length, node, i = 0; i < n; ++i) {
+ if (node = group[i]) {
+ for (var children = select.call(node, node.__data__, i, group), child, inherit = get(node, id), k = 0, l = children.length; k < l; ++k) {
+ if (child = children[k]) {
+ schedule(child, name, id, k, children, inherit);
+ }
+ }
+ subgroups.push(children);
+ parents.push(node);
+ }
+ }
+ }
+
+ return new Transition(subgroups, parents, name, id);
+};
+
+var Selection$1 = selection.prototype.constructor;
+
+var transition_selection = function() {
+ return new Selection$1(this._groups, this._parents);
+};
+
+function styleRemove$1(name, interpolate$$1) {
+ var value00,
+ value10,
+ interpolate0;
+ return function() {
+ var value0 = styleValue(this, name),
+ value1 = (this.style.removeProperty(name), styleValue(this, name));
+ return value0 === value1 ? null
+ : value0 === value00 && value1 === value10 ? interpolate0
+ : interpolate0 = interpolate$$1(value00 = value0, value10 = value1);
+ };
+}
+
+function styleRemoveEnd(name) {
+ return function() {
+ this.style.removeProperty(name);
+ };
+}
+
+function styleConstant$1(name, interpolate$$1, value1) {
+ var value00,
+ interpolate0;
+ return function() {
+ var value0 = styleValue(this, name);
+ return value0 === value1 ? null
+ : value0 === value00 ? interpolate0
+ : interpolate0 = interpolate$$1(value00 = value0, value1);
+ };
+}
+
+function styleFunction$1(name, interpolate$$1, value) {
+ var value00,
+ value10,
+ interpolate0;
+ return function() {
+ var value0 = styleValue(this, name),
+ value1 = value(this);
+ if (value1 == null) value1 = (this.style.removeProperty(name), styleValue(this, name));
+ return value0 === value1 ? null
+ : value0 === value00 && value1 === value10 ? interpolate0
+ : interpolate0 = interpolate$$1(value00 = value0, value10 = value1);
+ };
+}
+
+var transition_style = function(name, value, priority) {
+ var i = (name += "") === "transform" ? interpolateTransformCss : interpolate;
+ return value == null ? this
+ .styleTween(name, styleRemove$1(name, i))
+ .on("end.style." + name, styleRemoveEnd(name))
+ : this.styleTween(name, typeof value === "function"
+ ? styleFunction$1(name, i, tweenValue(this, "style." + name, value))
+ : styleConstant$1(name, i, value + ""), priority);
+};
+
+function styleTween(name, value, priority) {
+ function tween() {
+ var node = this, i = value.apply(node, arguments);
+ return i && function(t) {
+ node.style.setProperty(name, i(t), priority);
+ };
+ }
+ tween._value = value;
+ return tween;
+}
+
+var transition_styleTween = function(name, value, priority) {
+ var key = "style." + (name += "");
+ if (arguments.length < 2) return (key = this.tween(key)) && key._value;
+ if (value == null) return this.tween(key, null);
+ if (typeof value !== "function") throw new Error;
+ return this.tween(key, styleTween(name, value, priority == null ? "" : priority));
+};
+
+function textConstant$1(value) {
+ return function() {
+ this.textContent = value;
+ };
+}
+
+function textFunction$1(value) {
+ return function() {
+ var value1 = value(this);
+ this.textContent = value1 == null ? "" : value1;
+ };
+}
+
+var transition_text = function(value) {
+ return this.tween("text", typeof value === "function"
+ ? textFunction$1(tweenValue(this, "text", value))
+ : textConstant$1(value == null ? "" : value + ""));
+};
+
+var transition_transition = function() {
+ var name = this._name,
+ id0 = this._id,
+ id1 = newId();
+
+ for (var groups = this._groups, m = groups.length, j = 0; j < m; ++j) {
+ for (var group = groups[j], n = group.length, node, i = 0; i < n; ++i) {
+ if (node = group[i]) {
+ var inherit = get(node, id0);
+ schedule(node, name, id1, i, group, {
+ time: inherit.time + inherit.delay + inherit.duration,
+ delay: 0,
+ duration: inherit.duration,
+ ease: inherit.ease
+ });
+ }
+ }
+ }
+
+ return new Transition(groups, this._parents, name, id1);
+};
+
+var id = 0;
+
+function Transition(groups, parents, name, id) {
+ this._groups = groups;
+ this._parents = parents;
+ this._name = name;
+ this._id = id;
+}
+
+function transition(name) {
+ return selection().transition(name);
+}
+
+function newId() {
+ return ++id;
+}
+
+var selection_prototype = selection.prototype;
+
+Transition.prototype = transition.prototype = {
+ constructor: Transition,
+ select: transition_select,
+ selectAll: transition_selectAll,
+ filter: transition_filter,
+ merge: transition_merge,
+ selection: transition_selection,
+ transition: transition_transition,
+ call: selection_prototype.call,
+ nodes: selection_prototype.nodes,
+ node: selection_prototype.node,
+ size: selection_prototype.size,
+ empty: selection_prototype.empty,
+ each: selection_prototype.each,
+ on: transition_on,
+ attr: transition_attr,
+ attrTween: transition_attrTween,
+ style: transition_style,
+ styleTween: transition_styleTween,
+ text: transition_text,
+ remove: transition_remove,
+ tween: transition_tween,
+ delay: transition_delay,
+ duration: transition_duration,
+ ease: transition_ease
+};
+
+var defaultTiming = {
+ time: null, // Set on use.
+ delay: 0,
+ duration: 250,
+ ease: cubicInOut
+};
+
+function inherit(node, id) {
+ var timing;
+ while (!(timing = node.__transition) || !(timing = timing[id])) {
+ if (!(node = node.parentNode)) {
+ return defaultTiming.time = now(), defaultTiming;
+ }
+ }
+ return timing;
+}
+
+var selection_transition = function(name) {
+ var id,
+ timing;
+
+ if (name instanceof Transition) {
+ id = name._id, name = name._name;
+ } else {
+ id = newId(), (timing = defaultTiming).time = now(), name = name == null ? null : name + "";
+ }
+
+ for (var groups = this._groups, m = groups.length, j = 0; j < m; ++j) {
+ for (var group = groups[j], n = group.length, node, i = 0; i < n; ++i) {
+ if (node = group[i]) {
+ schedule(node, name, id, i, group, timing || inherit(node, id));
+ }
+ }
+ }
+
+ return new Transition(groups, this._parents, name, id);
+};
+
+selection.prototype.interrupt = selection_interrupt;
+selection.prototype.transition = selection_transition;
+
+exports.select = select;
+exports.selection = selection;
+exports.hierarchy = hierarchy;
+exports.partition = partition;
+exports.scaleLinear = linear;
+exports.easeCubic = cubicInOut;
+exports.ascending = ascending$1;
+exports.map = map$1;
+exports.transition = transition;
+
+Object.defineProperty(exports, '__esModule', { value: true });
+
+})));
+`
diff --git a/src/cmd/vendor/github.com/google/pprof/third_party/d3flamegraph/LICENSE b/src/cmd/vendor/github.com/google/pprof/third_party/d3flamegraph/LICENSE
new file mode 100644
index 0000000..8dada3e
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/third_party/d3flamegraph/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "{}"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright {yyyy} {name of copyright owner}
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/src/cmd/vendor/github.com/google/pprof/third_party/d3flamegraph/d3_flame_graph.go b/src/cmd/vendor/github.com/google/pprof/third_party/d3flamegraph/d3_flame_graph.go
new file mode 100644
index 0000000..58a7fb4
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/third_party/d3flamegraph/d3_flame_graph.go
@@ -0,0 +1,1009 @@
+// A D3.js plugin that produces flame graphs from hierarchical data.
+// https://github.com/spiermar/d3-flame-graph
+// Version 2.0.0-alpha4
+// See LICENSE file for license details
+
+package d3flamegraph
+
+// JSSource returns the d3-flamegraph.js file
+const JSSource = `
+(function (global, factory) {
+ typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('d3')) :
+ typeof define === 'function' && define.amd ? define(['exports', 'd3'], factory) :
+ (factory((global.d3 = global.d3 || {}),global.d3));
+}(this, (function (exports,d3) { 'use strict';
+
+var d3__default = 'default' in d3 ? d3['default'] : d3;
+
+var commonjsGlobal = typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};
+
+
+
+
+
+function createCommonjsModule(fn, module) {
+ return module = { exports: {} }, fn(module, module.exports), module.exports;
+}
+
+var d3Tip = createCommonjsModule(function (module) {
+// d3.tip
+// Copyright (c) 2013 Justin Palmer
+//
+// Tooltips for d3.js SVG visualizations
+
+(function (root, factory) {
+ if (typeof undefined === 'function' && undefined.amd) {
+ // AMD. Register as an anonymous module with d3 as a dependency.
+ undefined(['d3'], factory);
+ } else if ('object' === 'object' && module.exports) {
+ // CommonJS
+ var d3$$1 = d3__default;
+ module.exports = factory(d3$$1);
+ } else {
+ // Browser global.
+ root.d3.tip = factory(root.d3);
+ }
+}(commonjsGlobal, function (d3$$1) {
+
+ // Public - contructs a new tooltip
+ //
+ // Returns a tip
+ return function() {
+ var direction = d3_tip_direction,
+ offset = d3_tip_offset,
+ html = d3_tip_html,
+ node = initNode(),
+ svg = null,
+ point = null,
+ target = null;
+
+ function tip(vis) {
+ svg = getSVGNode(vis);
+ point = svg.createSVGPoint();
+ document.body.appendChild(node);
+ }
+
+ // Public - show the tooltip on the screen
+ //
+ // Returns a tip
+ tip.show = function() {
+ var args = Array.prototype.slice.call(arguments);
+ if(args[args.length - 1] instanceof SVGElement) target = args.pop();
+
+ var content = html.apply(this, args),
+ poffset = offset.apply(this, args),
+ dir = direction.apply(this, args),
+ nodel = getNodeEl(),
+ i = directions.length,
+ coords,
+ scrollTop = document.documentElement.scrollTop || document.body.scrollTop,
+ scrollLeft = document.documentElement.scrollLeft || document.body.scrollLeft;
+
+ nodel.html(content)
+ .style('opacity', 1).style('pointer-events', 'all');
+
+ while(i--) nodel.classed(directions[i], false);
+ coords = direction_callbacks.get(dir).apply(this);
+ nodel.classed(dir, true)
+ .style('top', (coords.top + poffset[0]) + scrollTop + 'px')
+ .style('left', (coords.left + poffset[1]) + scrollLeft + 'px');
+
+ return tip;
+ };
+
+ // Public - hide the tooltip
+ //
+ // Returns a tip
+ tip.hide = function() {
+ var nodel = getNodeEl();
+ nodel.style('opacity', 0).style('pointer-events', 'none');
+ return tip
+ };
+
+ // Public: Proxy attr calls to the d3 tip container. Sets or gets attribute value.
+ //
+ // n - name of the attribute
+ // v - value of the attribute
+ //
+ // Returns tip or attribute value
+ tip.attr = function(n, v) {
+ if (arguments.length < 2 && typeof n === 'string') {
+ return getNodeEl().attr(n)
+ } else {
+ var args = Array.prototype.slice.call(arguments);
+ d3$$1.selection.prototype.attr.apply(getNodeEl(), args);
+ }
+
+ return tip
+ };
+
+ // Public: Proxy style calls to the d3 tip container. Sets or gets a style value.
+ //
+ // n - name of the property
+ // v - value of the property
+ //
+ // Returns tip or style property value
+ tip.style = function(n, v) {
+ if (arguments.length < 2 && typeof n === 'string') {
+ return getNodeEl().style(n)
+ } else {
+ var args = Array.prototype.slice.call(arguments);
+ d3$$1.selection.prototype.style.apply(getNodeEl(), args);
+ }
+
+ return tip
+ };
+
+ // Public: Set or get the direction of the tooltip
+ //
+ // v - One of n(north), s(south), e(east), or w(west), nw(northwest),
+ // sw(southwest), ne(northeast) or se(southeast)
+ //
+ // Returns tip or direction
+ tip.direction = function(v) {
+ if (!arguments.length) return direction
+ direction = v == null ? v : functor(v);
+
+ return tip
+ };
+
+ // Public: Sets or gets the offset of the tip
+ //
+ // v - Array of [x, y] offset
+ //
+ // Returns offset or
+ tip.offset = function(v) {
+ if (!arguments.length) return offset
+ offset = v == null ? v : functor(v);
+
+ return tip
+ };
+
+ // Public: sets or gets the html value of the tooltip
+ //
+ // v - String value of the tip
+ //
+ // Returns html value or tip
+ tip.html = function(v) {
+ if (!arguments.length) return html
+ html = v == null ? v : functor(v);
+
+ return tip
+ };
+
+ // Public: destroys the tooltip and removes it from the DOM
+ //
+ // Returns a tip
+ tip.destroy = function() {
+ if(node) {
+ getNodeEl().remove();
+ node = null;
+ }
+ return tip;
+ };
+
+ function d3_tip_direction() { return 'n' }
+ function d3_tip_offset() { return [0, 0] }
+ function d3_tip_html() { return ' ' }
+
+ var direction_callbacks = d3$$1.map({
+ n: direction_n,
+ s: direction_s,
+ e: direction_e,
+ w: direction_w,
+ nw: direction_nw,
+ ne: direction_ne,
+ sw: direction_sw,
+ se: direction_se
+ }),
+
+ directions = direction_callbacks.keys();
+
+ function direction_n() {
+ var bbox = getScreenBBox();
+ return {
+ top: bbox.n.y - node.offsetHeight,
+ left: bbox.n.x - node.offsetWidth / 2
+ }
+ }
+
+ function direction_s() {
+ var bbox = getScreenBBox();
+ return {
+ top: bbox.s.y,
+ left: bbox.s.x - node.offsetWidth / 2
+ }
+ }
+
+ function direction_e() {
+ var bbox = getScreenBBox();
+ return {
+ top: bbox.e.y - node.offsetHeight / 2,
+ left: bbox.e.x
+ }
+ }
+
+ function direction_w() {
+ var bbox = getScreenBBox();
+ return {
+ top: bbox.w.y - node.offsetHeight / 2,
+ left: bbox.w.x - node.offsetWidth
+ }
+ }
+
+ function direction_nw() {
+ var bbox = getScreenBBox();
+ return {
+ top: bbox.nw.y - node.offsetHeight,
+ left: bbox.nw.x - node.offsetWidth
+ }
+ }
+
+ function direction_ne() {
+ var bbox = getScreenBBox();
+ return {
+ top: bbox.ne.y - node.offsetHeight,
+ left: bbox.ne.x
+ }
+ }
+
+ function direction_sw() {
+ var bbox = getScreenBBox();
+ return {
+ top: bbox.sw.y,
+ left: bbox.sw.x - node.offsetWidth
+ }
+ }
+
+ function direction_se() {
+ var bbox = getScreenBBox();
+ return {
+ top: bbox.se.y,
+ left: bbox.e.x
+ }
+ }
+
+ function initNode() {
+ var node = d3$$1.select(document.createElement('div'));
+ node.style('position', 'absolute').style('top', 0).style('opacity', 0)
+ .style('pointer-events', 'none').style('box-sizing', 'border-box');
+
+ return node.node()
+ }
+
+ function getSVGNode(el) {
+ el = el.node();
+ if(el.tagName.toLowerCase() === 'svg')
+ return el
+
+ return el.ownerSVGElement
+ }
+
+ function getNodeEl() {
+ if(node === null) {
+ node = initNode();
+ // re-add node to DOM
+ document.body.appendChild(node);
+ }
+ return d3$$1.select(node);
+ }
+
+ // Private - gets the screen coordinates of a shape
+ //
+ // Given a shape on the screen, will return an SVGPoint for the directions
+ // n(north), s(south), e(east), w(west), ne(northeast), se(southeast), nw(northwest),
+ // sw(southwest).
+ //
+ // +-+-+
+ // | |
+ // + +
+ // | |
+ // +-+-+
+ //
+ // Returns an Object {n, s, e, w, nw, sw, ne, se}
+ function getScreenBBox() {
+ var targetel = target || d3$$1.event.target;
+
+ while ('undefined' === typeof targetel.getScreenCTM && 'undefined' === targetel.parentNode) {
+ targetel = targetel.parentNode;
+ }
+
+ var bbox = {},
+ matrix = targetel.getScreenCTM(),
+ tbbox = targetel.getBBox(),
+ width = tbbox.width,
+ height = tbbox.height,
+ x = tbbox.x,
+ y = tbbox.y;
+
+ point.x = x;
+ point.y = y;
+ bbox.nw = point.matrixTransform(matrix);
+ point.x += width;
+ bbox.ne = point.matrixTransform(matrix);
+ point.y += height;
+ bbox.se = point.matrixTransform(matrix);
+ point.x -= width;
+ bbox.sw = point.matrixTransform(matrix);
+ point.y -= height / 2;
+ bbox.w = point.matrixTransform(matrix);
+ point.x += width;
+ bbox.e = point.matrixTransform(matrix);
+ point.x -= width / 2;
+ point.y -= height / 2;
+ bbox.n = point.matrixTransform(matrix);
+ point.y += height;
+ bbox.s = point.matrixTransform(matrix);
+
+ return bbox
+ }
+
+ // Private - replace D3JS 3.X d3.functor() function
+ function functor(v) {
+ return typeof v === "function" ? v : function() {
+ return v
+ }
+ }
+
+ return tip
+ };
+
+}));
+});
+
+var flamegraph = function () {
+ var w = 960; // graph width
+ var h = null; // graph height
+ var c = 18; // cell height
+ var selection = null; // selection
+ var tooltip = true; // enable tooltip
+ var title = ''; // graph title
+ var transitionDuration = 750;
+ var transitionEase = d3.easeCubic; // tooltip offset
+ var sort = false;
+ var inverted = false; // invert the graph direction
+ var clickHandler = null;
+ var minFrameSize = 0;
+ var details = null;
+
+ var tip = d3Tip()
+ .direction('s')
+ .offset([8, 0])
+ .attr('class', 'd3-flame-graph-tip')
+ .html(function (d) { return label(d) });
+
+ var svg;
+
+ function name (d) {
+ return d.data.n || d.data.name
+ }
+
+ function libtype (d) {
+ return d.data.l || d.data.libtype
+ }
+
+ function children (d) {
+ return d.c || d.children
+ }
+
+ function value (d) {
+ return d.v || d.value
+ }
+
+ var label = function (d) {
+ return name(d) + ' (' + d3.format('.3f')(100 * (d.x1 - d.x0), 3) + '%, ' + value(d) + ' samples)'
+ };
+
+ function setDetails (t) {
+ if (details) { details.innerHTML = t; }
+ }
+
+ var colorMapper = function (d) {
+ return d.highlight ? '#E600E6' : colorHash(name(d), libtype(d))
+ };
+
+ function generateHash (name) {
+ // Return a vector (0.0->1.0) that is a hash of the input string.
+ // The hash is computed to favor early characters over later ones, so
+ // that strings with similar starts have similar vectors. Only the first
+ // 6 characters are considered.
+ const MAX_CHAR = 6;
+
+ var hash = 0;
+ var maxHash = 0;
+ var weight = 1;
+ var mod = 10;
+
+ if (name) {
+ for (var i = 0; i < name.length; i++) {
+ if (i > MAX_CHAR) { break }
+ hash += weight * (name.charCodeAt(i) % mod);
+ maxHash += weight * (mod - 1);
+ weight *= 0.70;
+ }
+ if (maxHash > 0) { hash = hash / maxHash; }
+ }
+ return hash
+ }
+
+ function colorHash (name, libtype) {
+ // Return a color for the given name and library type. The library type
+ // selects the hue, and the name is hashed to a color in that hue.
+
+ var r;
+ var g;
+ var b;
+
+ // Select hue. Order is important.
+ var hue;
+ if (typeof libtype === 'undefined' || libtype === '') {
+ // default when libtype is not in use
+ hue = 'warm';
+ } else {
+ hue = 'red';
+ if (name.match(/::/)) {
+ hue = 'yellow';
+ }
+ if (libtype === 'kernel') {
+ hue = 'orange';
+ } else if (libtype === 'jit') {
+ hue = 'green';
+ } else if (libtype === 'inlined') {
+ hue = 'aqua';
+ }
+ }
+
+ // calculate hash
+ var vector = 0;
+ if (name) {
+ var nameArr = name.split('` + "`" + `');
+ if (nameArr.length > 1) {
+ name = nameArr[nameArr.length - 1]; // drop module name if present
+ }
+ name = name.split('(')[0]; // drop extra info
+ vector = generateHash(name);
+ }
+
+ // calculate color
+ if (hue === 'red') {
+ r = 200 + Math.round(55 * vector);
+ g = 50 + Math.round(80 * vector);
+ b = g;
+ } else if (hue === 'orange') {
+ r = 190 + Math.round(65 * vector);
+ g = 90 + Math.round(65 * vector);
+ b = 0;
+ } else if (hue === 'yellow') {
+ r = 175 + Math.round(55 * vector);
+ g = r;
+ b = 50 + Math.round(20 * vector);
+ } else if (hue === 'green') {
+ r = 50 + Math.round(60 * vector);
+ g = 200 + Math.round(55 * vector);
+ b = r;
+ } else if (hue === 'aqua') {
+ r = 50 + Math.round(60 * vector);
+ g = 165 + Math.round(55 * vector);
+ b = g;
+ } else {
+ // original warm palette
+ r = 200 + Math.round(55 * vector);
+ g = 0 + Math.round(230 * (1 - vector));
+ b = 0 + Math.round(55 * (1 - vector));
+ }
+
+ return 'rgb(' + r + ',' + g + ',' + b + ')'
+ }
+
+ function hide (d) {
+ d.data.hide = true;
+ if (children(d)) {
+ children(d).forEach(hide);
+ }
+ }
+
+ function show (d) {
+ d.data.fade = false;
+ d.data.hide = false;
+ if (children(d)) {
+ children(d).forEach(show);
+ }
+ }
+
+ function getSiblings (d) {
+ var siblings = [];
+ if (d.parent) {
+ var me = d.parent.children.indexOf(d);
+ siblings = d.parent.children.slice(0);
+ siblings.splice(me, 1);
+ }
+ return siblings
+ }
+
+ function hideSiblings (d) {
+ var siblings = getSiblings(d);
+ siblings.forEach(function (s) {
+ hide(s);
+ });
+ if (d.parent) {
+ hideSiblings(d.parent);
+ }
+ }
+
+ function fadeAncestors (d) {
+ if (d.parent) {
+ d.parent.data.fade = true;
+ fadeAncestors(d.parent);
+ }
+ }
+
+ // function getRoot (d) {
+ // if (d.parent) {
+ // return getRoot(d.parent)
+ // }
+ // return d
+ // }
+
+ function zoom (d) {
+ tip.hide(d);
+ hideSiblings(d);
+ show(d);
+ fadeAncestors(d);
+ update();
+ if (typeof clickHandler === 'function') {
+ clickHandler(d);
+ }
+ }
+
+ function searchTree (d, term) {
+ var re = new RegExp(term);
+ var searchResults = [];
+
+ function searchInner (d) {
+ var label = name(d);
+
+ if (children(d)) {
+ children(d).forEach(function (child) {
+ searchInner(child);
+ });
+ }
+
+ if (label.match(re)) {
+ d.highlight = true;
+ searchResults.push(d);
+ } else {
+ d.highlight = false;
+ }
+ }
+
+ searchInner(d);
+ return searchResults
+ }
+
+ function clear (d) {
+ d.highlight = false;
+ if (children(d)) {
+ children(d).forEach(function (child) {
+ clear(child);
+ });
+ }
+ }
+
+ function doSort (a, b) {
+ if (typeof sort === 'function') {
+ return sort(a, b)
+ } else if (sort) {
+ return d3.ascending(name(a), name(b))
+ }
+ }
+
+ var p = d3.partition();
+
+ function filterNodes (root) {
+ var nodeList = root.descendants();
+ if (minFrameSize > 0) {
+ var kx = w / (root.x1 - root.x0);
+ nodeList = nodeList.filter(function (el) {
+ return ((el.x1 - el.x0) * kx) > minFrameSize
+ });
+ }
+ return nodeList
+ }
+
+ function update () {
+ selection.each(function (root) {
+ var x = d3.scaleLinear().range([0, w]);
+ var y = d3.scaleLinear().range([0, c]);
+
+ if (sort) root.sort(doSort);
+ root.sum(function (d) {
+ if (d.fade || d.hide) {
+ return 0
+ }
+ // The node's self value is its total value minus all children.
+ var v = value(d);
+ if (children(d)) {
+ var c = children(d);
+ for (var i = 0; i < c.length; i++) {
+ v -= value(c[i]);
+ }
+ }
+ return v
+ });
+ p(root);
+
+ var kx = w / (root.x1 - root.x0);
+ function width (d) { return (d.x1 - d.x0) * kx }
+
+ var descendants = filterNodes(root);
+ var g = d3.select(this).select('svg').selectAll('g').data(descendants, function (d) { return d.id });
+
+ g.transition()
+ .duration(transitionDuration)
+ .ease(transitionEase)
+ .attr('transform', function (d) { return 'translate(' + x(d.x0) + ',' + (inverted ? y(d.depth) : (h - y(d.depth) - c)) + ')' });
+
+ g.select('rect')
+ .attr('width', width);
+
+ var node = g.enter()
+ .append('svg:g')
+ .attr('transform', function (d) { return 'translate(' + x(d.x0) + ',' + (inverted ? y(d.depth) : (h - y(d.depth) - c)) + ')' });
+
+ node.append('svg:rect')
+ .transition()
+ .delay(transitionDuration / 2)
+ .attr('width', width);
+
+ if (!tooltip) { node.append('svg:title'); }
+
+ node.append('foreignObject')
+ .append('xhtml:div');
+
+ // Now we have to re-select to see the new elements (why?).
+ g = d3.select(this).select('svg').selectAll('g').data(descendants, function (d) { return d.id });
+
+ g.attr('width', width)
+ .attr('height', function (d) { return c })
+ .attr('name', function (d) { return name(d) })
+ .attr('class', function (d) { return d.data.fade ? 'frame fade' : 'frame' });
+
+ g.select('rect')
+ .attr('height', function (d) { return c })
+ .attr('fill', function (d) { return colorMapper(d) });
+
+ if (!tooltip) {
+ g.select('title')
+ .text(label);
+ }
+
+ g.select('foreignObject')
+ .attr('width', width)
+ .attr('height', function (d) { return c })
+ .select('div')
+ .attr('class', 'd3-flame-graph-label')
+ .style('display', function (d) { return (width(d) < 35) ? 'none' : 'block' })
+ .transition()
+ .delay(transitionDuration)
+ .text(name);
+
+ g.on('click', zoom);
+
+ g.exit()
+ .remove();
+
+ g.on('mouseover', function (d) {
+ if (tooltip) tip.show(d, this);
+ setDetails(label(d));
+ }).on('mouseout', function (d) {
+ if (tooltip) tip.hide(d);
+ setDetails('');
+ });
+ });
+ }
+
+ function merge (data, samples) {
+ samples.forEach(function (sample) {
+ var node = data.find(function (element) {
+ return (element.name === sample.name)
+ });
+
+ if (node) {
+ if (node.original) {
+ node.original += sample.value;
+ } else {
+ node.value += sample.value;
+ }
+ if (sample.children) {
+ if (!node.children) {
+ node.children = [];
+ }
+ merge(node.children, sample.children);
+ }
+ } else {
+ data.push(sample);
+ }
+ });
+ }
+
+ function s4 () {
+ return Math.floor((1 + Math.random()) * 0x10000)
+ .toString(16)
+ .substring(1)
+ }
+
+ function injectIds (node) {
+ node.id = s4() + '-' + s4() + '-' + '-' + s4() + '-' + s4();
+ var children = node.c || node.children || [];
+ for (var i = 0; i < children.length; i++) {
+ injectIds(children[i]);
+ }
+ }
+
+ function chart (s) {
+ var root = d3.hierarchy(
+ s.datum(), function (d) { return children(d) }
+ );
+ injectIds(root);
+ selection = s.datum(root);
+
+ if (!arguments.length) return chart
+
+ if (!h) {
+ h = (root.height + 2) * c;
+ }
+
+ selection.each(function (data) {
+ if (!svg) {
+ svg = d3.select(this)
+ .append('svg:svg')
+ .attr('width', w)
+ .attr('height', h)
+ .attr('class', 'partition d3-flame-graph')
+ .call(tip);
+
+ svg.append('svg:text')
+ .attr('class', 'title')
+ .attr('text-anchor', 'middle')
+ .attr('y', '25')
+ .attr('x', w / 2)
+ .attr('fill', '#808080')
+ .text(title);
+ }
+ });
+
+ // first draw
+ update();
+ }
+
+ chart.height = function (_) {
+ if (!arguments.length) { return h }
+ h = _;
+ return chart
+ };
+
+ chart.width = function (_) {
+ if (!arguments.length) { return w }
+ w = _;
+ return chart
+ };
+
+ chart.cellHeight = function (_) {
+ if (!arguments.length) { return c }
+ c = _;
+ return chart
+ };
+
+ chart.tooltip = function (_) {
+ if (!arguments.length) { return tooltip }
+ if (typeof _ === 'function') {
+ tip = _;
+ }
+ tooltip = !!_;
+ return chart
+ };
+
+ chart.title = function (_) {
+ if (!arguments.length) { return title }
+ title = _;
+ return chart
+ };
+
+ chart.transitionDuration = function (_) {
+ if (!arguments.length) { return transitionDuration }
+ transitionDuration = _;
+ return chart
+ };
+
+ chart.transitionEase = function (_) {
+ if (!arguments.length) { return transitionEase }
+ transitionEase = _;
+ return chart
+ };
+
+ chart.sort = function (_) {
+ if (!arguments.length) { return sort }
+ sort = _;
+ return chart
+ };
+
+ chart.inverted = function (_) {
+ if (!arguments.length) { return inverted }
+ inverted = _;
+ return chart
+ };
+
+ chart.label = function (_) {
+ if (!arguments.length) { return label }
+ label = _;
+ return chart
+ };
+
+ chart.search = function (term) {
+ var searchResults = [];
+ selection.each(function (data) {
+ searchResults = searchTree(data, term);
+ update();
+ });
+ return searchResults
+ };
+
+ chart.clear = function () {
+ selection.each(function (data) {
+ clear(data);
+ update();
+ });
+ };
+
+ chart.zoomTo = function (d) {
+ zoom(d);
+ };
+
+ chart.resetZoom = function () {
+ selection.each(function (data) {
+ zoom(data); // zoom to root
+ });
+ };
+
+ chart.onClick = function (_) {
+ if (!arguments.length) {
+ return clickHandler
+ }
+ clickHandler = _;
+ return chart
+ };
+
+ chart.merge = function (samples) {
+ var newRoot; // Need to re-create hierarchy after data changes.
+ selection.each(function (root) {
+ merge([root.data], [samples]);
+ newRoot = d3.hierarchy(root.data, function (d) { return children(d) });
+ injectIds(newRoot);
+ });
+ selection = selection.datum(newRoot);
+ update();
+ };
+
+ chart.color = function (_) {
+ if (!arguments.length) { return colorMapper }
+ colorMapper = _;
+ return chart
+ };
+
+ chart.minFrameSize = function (_) {
+ if (!arguments.length) { return minFrameSize }
+ minFrameSize = _;
+ return chart
+ };
+
+ chart.details = function (_) {
+ if (!arguments.length) { return details }
+ details = _;
+ return chart
+ };
+
+ return chart
+};
+
+exports.flamegraph = flamegraph;
+
+Object.defineProperty(exports, '__esModule', { value: true });
+
+})));
+`
+
+// CSSSource returns the d3-flamegraph.css file
+const CSSSource = `
+.d3-flame-graph rect {
+ stroke: #EEEEEE;
+ fill-opacity: .8;
+}
+
+.d3-flame-graph rect:hover {
+ stroke: #474747;
+ stroke-width: 0.5;
+ cursor: pointer;
+}
+
+.d3-flame-graph-label {
+ pointer-events: none;
+ white-space: nowrap;
+ text-overflow: ellipsis;
+ overflow: hidden;
+ font-size: 12px;
+ font-family: Verdana;
+ margin-left: 4px;
+ margin-right: 4px;
+ line-height: 1.5;
+ padding: 0 0 0;
+ font-weight: 400;
+ color: black;
+ text-align: left;
+}
+
+.d3-flame-graph .fade {
+ opacity: 0.6 !important;
+}
+
+.d3-flame-graph .title {
+ font-size: 20px;
+ font-family: Verdana;
+}
+
+.d3-flame-graph-tip {
+ line-height: 1;
+ font-family: Verdana;
+ font-size: 12px;
+ padding: 12px;
+ background: rgba(0, 0, 0, 0.8);
+ color: #fff;
+ border-radius: 2px;
+ pointer-events: none;
+}
+
+/* Creates a small triangle extender for the tooltip */
+.d3-flame-graph-tip:after {
+ box-sizing: border-box;
+ display: inline;
+ font-size: 10px;
+ width: 100%;
+ line-height: 1;
+ color: rgba(0, 0, 0, 0.8);
+ position: absolute;
+ pointer-events: none;
+}
+
+/* Northward tooltips */
+.d3-flame-graph-tip.n:after {
+ content: "\25BC";
+ margin: -1px 0 0 0;
+ top: 100%;
+ left: 0;
+ text-align: center;
+}
+
+/* Eastward tooltips */
+.d3-flame-graph-tip.e:after {
+ content: "\25C0";
+ margin: -4px 0 0 0;
+ top: 50%;
+ left: -8px;
+}
+
+/* Southward tooltips */
+.d3-flame-graph-tip.s:after {
+ content: "\25B2";
+ margin: 0 0 1px 0;
+ top: -8px;
+ left: 0;
+ text-align: center;
+}
+
+/* Westward tooltips */
+.d3-flame-graph-tip.w:after {
+ content: "\25B6";
+ margin: -4px 0 0 -1px;
+ top: 50%;
+ left: 100%;
+}
+`
diff --git a/src/cmd/vendor/github.com/google/pprof/third_party/svgpan/LICENSE b/src/cmd/vendor/github.com/google/pprof/third_party/svgpan/LICENSE
new file mode 100644
index 0000000..35bc174
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/third_party/svgpan/LICENSE
@@ -0,0 +1,27 @@
+Copyright 2009-2017 Andrea Leofreddi <a.leofreddi@vleo.net>. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification, are
+permitted provided that the following conditions are met:
+
+ 1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ 2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+ 3. Neither the name of the copyright holder nor the names of its
+ contributors may be used to endorse or promote products derived from
+ this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY COPYRIGHT HOLDERS AND CONTRIBUTORS ``AS IS'' AND ANY EXPRESS
+OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
+AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS OR
+CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+The views and conclusions contained in the software and documentation are those of the
+authors and should not be interpreted as representing official policies, either expressed
+or implied, of Andrea Leofreddi.
diff --git a/src/cmd/vendor/github.com/google/pprof/third_party/svgpan/svgpan.go b/src/cmd/vendor/github.com/google/pprof/third_party/svgpan/svgpan.go
new file mode 100644
index 0000000..6ca08ad
--- /dev/null
+++ b/src/cmd/vendor/github.com/google/pprof/third_party/svgpan/svgpan.go
@@ -0,0 +1,297 @@
+// SVG pan and zoom library.
+// See copyright notice in string constant below.
+
+package svgpan
+
+// https://github.com/aleofreddi/svgpan
+
+// JSSource returns the svgpan.js file
+const JSSource = `
+/**
+ * SVGPan library 1.2.2
+ * ======================
+ *
+ * Given an unique existing element with id "viewport" (or when missing, the
+ * first g-element), including the library into any SVG adds the following
+ * capabilities:
+ *
+ * - Mouse panning
+ * - Mouse zooming (using the wheel)
+ * - Object dragging
+ *
+ * You can configure the behaviour of the pan/zoom/drag with the variables
+ * listed in the CONFIGURATION section of this file.
+ *
+ * Known issues:
+ *
+ * - Zooming (while panning) on Safari has still some issues
+ *
+ * Releases:
+ *
+ * 1.2.2, Tue Aug 30 17:21:56 CEST 2011, Andrea Leofreddi
+ * - Fixed viewBox on root tag (#7)
+ * - Improved zoom speed (#2)
+ *
+ * 1.2.1, Mon Jul 4 00:33:18 CEST 2011, Andrea Leofreddi
+ * - Fixed a regression with mouse wheel (now working on Firefox 5)
+ * - Working with viewBox attribute (#4)
+ * - Added "use strict;" and fixed resulting warnings (#5)
+ * - Added configuration variables, dragging is disabled by default (#3)
+ *
+ * 1.2, Sat Mar 20 08:42:50 GMT 2010, Zeng Xiaohui
+ * Fixed a bug with browser mouse handler interaction
+ *
+ * 1.1, Wed Feb 3 17:39:33 GMT 2010, Zeng Xiaohui
+ * Updated the zoom code to support the mouse wheel on Safari/Chrome
+ *
+ * 1.0, Andrea Leofreddi
+ * First release
+ *
+ * This code is licensed under the following BSD license:
+ *
+ * Copyright 2009-2017 Andrea Leofreddi <a.leofreddi@vleo.net>. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without modification, are
+ * permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holder nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY COPYRIGHT HOLDERS AND CONTRIBUTORS ''AS IS'' AND ANY EXPRESS
+ * OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
+ * AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ * The views and conclusions contained in the software and documentation are those of the
+ * authors and should not be interpreted as representing official policies, either expressed
+ * or implied, of Andrea Leofreddi.
+ */
+
+"use strict";
+
+/// CONFIGURATION
+/// ====>
+
+var enablePan = 1; // 1 or 0: enable or disable panning (default enabled)
+var enableZoom = 1; // 1 or 0: enable or disable zooming (default enabled)
+var enableDrag = 0; // 1 or 0: enable or disable dragging (default disabled)
+var zoomScale = 0.2; // Zoom sensitivity
+
+/// <====
+/// END OF CONFIGURATION
+
+var root = document.documentElement;
+
+var state = 'none', svgRoot = null, stateTarget, stateOrigin, stateTf;
+
+setupHandlers(root);
+
+/**
+ * Register handlers
+ */
+function setupHandlers(root){
+ setAttributes(root, {
+ "onmouseup" : "handleMouseUp(evt)",
+ "onmousedown" : "handleMouseDown(evt)",
+ "onmousemove" : "handleMouseMove(evt)",
+ //"onmouseout" : "handleMouseUp(evt)", // Decomment this to stop the pan functionality when dragging out of the SVG element
+ });
+
+ if(navigator.userAgent.toLowerCase().indexOf('webkit') >= 0)
+ window.addEventListener('mousewheel', handleMouseWheel, false); // Chrome/Safari
+ else
+ window.addEventListener('DOMMouseScroll', handleMouseWheel, false); // Others
+}
+
+/**
+ * Retrieves the root element for SVG manipulation. The element is then cached into the svgRoot global variable.
+ */
+function getRoot(root) {
+ if(svgRoot == null) {
+ var r = root.getElementById("viewport") ? root.getElementById("viewport") : root.documentElement, t = r;
+
+ while(t != root) {
+ if(t.getAttribute("viewBox")) {
+ setCTM(r, t.getCTM());
+
+ t.removeAttribute("viewBox");
+ }
+
+ t = t.parentNode;
+ }
+
+ svgRoot = r;
+ }
+
+ return svgRoot;
+}
+
+/**
+ * Instance an SVGPoint object with given event coordinates.
+ */
+function getEventPoint(evt) {
+ var p = root.createSVGPoint();
+
+ p.x = evt.clientX;
+ p.y = evt.clientY;
+
+ return p;
+}
+
+/**
+ * Sets the current transform matrix of an element.
+ */
+function setCTM(element, matrix) {
+ var s = "matrix(" + matrix.a + "," + matrix.b + "," + matrix.c + "," + matrix.d + "," + matrix.e + "," + matrix.f + ")";
+
+ element.setAttribute("transform", s);
+}
+
+/**
+ * Dumps a matrix to a string (useful for debug).
+ */
+function dumpMatrix(matrix) {
+ var s = "[ " + matrix.a + ", " + matrix.c + ", " + matrix.e + "\n " + matrix.b + ", " + matrix.d + ", " + matrix.f + "\n 0, 0, 1 ]";
+
+ return s;
+}
+
+/**
+ * Sets attributes of an element.
+ */
+function setAttributes(element, attributes){
+ for (var i in attributes)
+ element.setAttributeNS(null, i, attributes[i]);
+}
+
+/**
+ * Handle mouse wheel event.
+ */
+function handleMouseWheel(evt) {
+ if(!enableZoom)
+ return;
+
+ if(evt.preventDefault)
+ evt.preventDefault();
+
+ evt.returnValue = false;
+
+ var svgDoc = evt.target.ownerDocument;
+
+ var delta;
+
+ if(evt.wheelDelta)
+ delta = evt.wheelDelta / 360; // Chrome/Safari
+ else
+ delta = evt.detail / -9; // Mozilla
+
+ var z = Math.pow(1 + zoomScale, delta);
+
+ var g = getRoot(svgDoc);
+
+ var p = getEventPoint(evt);
+
+ p = p.matrixTransform(g.getCTM().inverse());
+
+ // Compute new scale matrix in current mouse position
+ var k = root.createSVGMatrix().translate(p.x, p.y).scale(z).translate(-p.x, -p.y);
+
+ setCTM(g, g.getCTM().multiply(k));
+
+ if(typeof(stateTf) == "undefined")
+ stateTf = g.getCTM().inverse();
+
+ stateTf = stateTf.multiply(k.inverse());
+}
+
+/**
+ * Handle mouse move event.
+ */
+function handleMouseMove(evt) {
+ if(evt.preventDefault)
+ evt.preventDefault();
+
+ evt.returnValue = false;
+
+ var svgDoc = evt.target.ownerDocument;
+
+ var g = getRoot(svgDoc);
+
+ if(state == 'pan' && enablePan) {
+ // Pan mode
+ var p = getEventPoint(evt).matrixTransform(stateTf);
+
+ setCTM(g, stateTf.inverse().translate(p.x - stateOrigin.x, p.y - stateOrigin.y));
+ } else if(state == 'drag' && enableDrag) {
+ // Drag mode
+ var p = getEventPoint(evt).matrixTransform(g.getCTM().inverse());
+
+ setCTM(stateTarget, root.createSVGMatrix().translate(p.x - stateOrigin.x, p.y - stateOrigin.y).multiply(g.getCTM().inverse()).multiply(stateTarget.getCTM()));
+
+ stateOrigin = p;
+ }
+}
+
+/**
+ * Handle click event.
+ */
+function handleMouseDown(evt) {
+ if(evt.preventDefault)
+ evt.preventDefault();
+
+ evt.returnValue = false;
+
+ var svgDoc = evt.target.ownerDocument;
+
+ var g = getRoot(svgDoc);
+
+ if(
+ evt.target.tagName == "svg"
+ || !enableDrag // Pan anyway when drag is disabled and the user clicked on an element
+ ) {
+ // Pan mode
+ state = 'pan';
+
+ stateTf = g.getCTM().inverse();
+
+ stateOrigin = getEventPoint(evt).matrixTransform(stateTf);
+ } else {
+ // Drag mode
+ state = 'drag';
+
+ stateTarget = evt.target;
+
+ stateTf = g.getCTM().inverse();
+
+ stateOrigin = getEventPoint(evt).matrixTransform(stateTf);
+ }
+}
+
+/**
+ * Handle mouse button release event.
+ */
+function handleMouseUp(evt) {
+ if(evt.preventDefault)
+ evt.preventDefault();
+
+ evt.returnValue = false;
+
+ var svgDoc = evt.target.ownerDocument;
+
+ if(state == 'pan' || state == 'drag') {
+ // Quit pan mode
+ state = '';
+ }
+}
+`
diff --git a/src/cmd/vendor/github.com/ianlancetaylor/demangle/.gitignore b/src/cmd/vendor/github.com/ianlancetaylor/demangle/.gitignore
new file mode 100644
index 0000000..4a8b38f
--- /dev/null
+++ b/src/cmd/vendor/github.com/ianlancetaylor/demangle/.gitignore
@@ -0,0 +1,13 @@
+*.o
+*.a
+*.so
+._*
+.nfs.*
+a.out
+*~
+*.orig
+*.rej
+*.exe
+.*.swp
+core
+demangle.test
diff --git a/src/cmd/vendor/github.com/ianlancetaylor/demangle/LICENSE b/src/cmd/vendor/github.com/ianlancetaylor/demangle/LICENSE
new file mode 100644
index 0000000..d29b372
--- /dev/null
+++ b/src/cmd/vendor/github.com/ianlancetaylor/demangle/LICENSE
@@ -0,0 +1,27 @@
+Copyright (c) 2015 The Go Authors. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+ * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/src/cmd/vendor/github.com/ianlancetaylor/demangle/README.md b/src/cmd/vendor/github.com/ianlancetaylor/demangle/README.md
new file mode 100644
index 0000000..ef3f94a
--- /dev/null
+++ b/src/cmd/vendor/github.com/ianlancetaylor/demangle/README.md
@@ -0,0 +1,3 @@
+# github.com/ianlancetaylor/demangle
+
+A Go package that can be used to demangle C++ symbol names.
diff --git a/src/cmd/vendor/github.com/ianlancetaylor/demangle/ast.go b/src/cmd/vendor/github.com/ianlancetaylor/demangle/ast.go
new file mode 100644
index 0000000..ccbe5b3
--- /dev/null
+++ b/src/cmd/vendor/github.com/ianlancetaylor/demangle/ast.go
@@ -0,0 +1,3205 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package demangle
+
+import (
+ "fmt"
+ "strings"
+)
+
+// AST is an abstract syntax tree representing a C++ declaration.
+// This is sufficient for the demangler but is by no means a general C++ AST.
+type AST interface {
+ // Internal method to convert to demangled string.
+ print(*printState)
+
+ // Traverse each element of an AST. If the function returns
+ // false, traversal of children of that element is skipped.
+ Traverse(func(AST) bool)
+
+ // Copy an AST with possible transformations.
+ // If the skip function returns true, no copy is required.
+ // If the copy function returns nil, no copy is required.
+ // The Copy method will do the right thing if copy returns nil
+ // for some components of an AST but not others, so a good
+ // copy function will only return non-nil for AST values that
+ // need to change.
+ // Copy itself returns either a copy or nil.
+ Copy(copy func(AST) AST, skip func(AST) bool) AST
+
+ // Implement the fmt.GoStringer interface.
+ GoString() string
+ goString(indent int, field string) string
+}
+
+// ASTToString returns the demangled name of the AST.
+func ASTToString(a AST, options ...Option) string {
+ tparams := true
+ for _, o := range options {
+ switch o {
+ case NoTemplateParams:
+ tparams = false
+ }
+ }
+
+ ps := printState{tparams: tparams}
+ a.print(&ps)
+ return ps.buf.String()
+}
+
+// The printState type holds information needed to print an AST.
+type printState struct {
+ tparams bool // whether to print template parameters
+
+ buf strings.Builder
+ last byte // Last byte written to buffer.
+
+ // The inner field is a list of items to print for a type
+ // name. This is used by types to implement the inside-out
+ // C++ declaration syntax.
+ inner []AST
+
+ // The printing field is a list of items we are currently
+ // printing. This avoids endless recursion if a substitution
+ // reference creates a cycle in the graph.
+ printing []AST
+}
+
+// writeByte adds a byte to the string being printed.
+func (ps *printState) writeByte(b byte) {
+ ps.last = b
+ ps.buf.WriteByte(b)
+}
+
+// writeString adds a string to the string being printed.
+func (ps *printState) writeString(s string) {
+ if len(s) > 0 {
+ ps.last = s[len(s)-1]
+ }
+ ps.buf.WriteString(s)
+}
+
+// Print an AST.
+func (ps *printState) print(a AST) {
+ c := 0
+ for _, v := range ps.printing {
+ if v == a {
+ // We permit the type to appear once, and
+ // return without printing anything if we see
+ // it twice. This is for a case like
+ // _Z6outer2IsEPFilES1_, where the
+ // substitution is printed differently the
+ // second time because the set of inner types
+ // is different.
+ c++
+ if c > 1 {
+ return
+ }
+ }
+ }
+ ps.printing = append(ps.printing, a)
+
+ a.print(ps)
+
+ ps.printing = ps.printing[:len(ps.printing)-1]
+}
+
+// Name is an unqualified name.
+type Name struct {
+ Name string
+}
+
+func (n *Name) print(ps *printState) {
+ ps.writeString(n.Name)
+}
+
+func (n *Name) Traverse(fn func(AST) bool) {
+ fn(n)
+}
+
+func (n *Name) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(n) {
+ return nil
+ }
+ return fn(n)
+}
+
+func (n *Name) GoString() string {
+ return n.goString(0, "Name: ")
+}
+
+func (n *Name) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%s%s", indent, "", field, n.Name)
+}
+
+// Typed is a typed name.
+type Typed struct {
+ Name AST
+ Type AST
+}
+
+func (t *Typed) print(ps *printState) {
+ // We are printing a typed name, so ignore the current set of
+ // inner names to print. Pass down our name as the one to use.
+ holdInner := ps.inner
+ defer func() { ps.inner = holdInner }()
+
+ ps.inner = []AST{t}
+ ps.print(t.Type)
+ if len(ps.inner) > 0 {
+ // The type did not print the name; print it now in
+ // the default location.
+ ps.writeByte(' ')
+ ps.print(t.Name)
+ }
+}
+
+func (t *Typed) printInner(ps *printState) {
+ ps.print(t.Name)
+}
+
+func (t *Typed) Traverse(fn func(AST) bool) {
+ if fn(t) {
+ t.Name.Traverse(fn)
+ t.Type.Traverse(fn)
+ }
+}
+
+func (t *Typed) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(t) {
+ return nil
+ }
+ name := t.Name.Copy(fn, skip)
+ typ := t.Type.Copy(fn, skip)
+ if name == nil && typ == nil {
+ return fn(t)
+ }
+ if name == nil {
+ name = t.Name
+ }
+ if typ == nil {
+ typ = t.Type
+ }
+ t = &Typed{Name: name, Type: typ}
+ if r := fn(t); r != nil {
+ return r
+ }
+ return t
+}
+
+func (t *Typed) GoString() string {
+ return t.goString(0, "")
+}
+
+func (t *Typed) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sTyped:\n%s\n%s", indent, "", field,
+ t.Name.goString(indent+2, "Name: "),
+ t.Type.goString(indent+2, "Type: "))
+}
+
+// Qualified is a name in a scope.
+type Qualified struct {
+ Scope AST
+ Name AST
+
+ // The LocalName field is true if this is parsed as a
+ // <local-name>. We shouldn't really need this, but in some
+ // cases (for the unary sizeof operator) the standard
+ // demangler prints a local name slightly differently. We
+ // keep track of this for compatibility.
+ LocalName bool // A full local name encoding
+}
+
+func (q *Qualified) print(ps *printState) {
+ ps.print(q.Scope)
+ ps.writeString("::")
+ ps.print(q.Name)
+}
+
+func (q *Qualified) Traverse(fn func(AST) bool) {
+ if fn(q) {
+ q.Scope.Traverse(fn)
+ q.Name.Traverse(fn)
+ }
+}
+
+func (q *Qualified) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(q) {
+ return nil
+ }
+ scope := q.Scope.Copy(fn, skip)
+ name := q.Name.Copy(fn, skip)
+ if scope == nil && name == nil {
+ return fn(q)
+ }
+ if scope == nil {
+ scope = q.Scope
+ }
+ if name == nil {
+ name = q.Name
+ }
+ q = &Qualified{Scope: scope, Name: name, LocalName: q.LocalName}
+ if r := fn(q); r != nil {
+ return r
+ }
+ return q
+}
+
+func (q *Qualified) GoString() string {
+ return q.goString(0, "")
+}
+
+func (q *Qualified) goString(indent int, field string) string {
+ s := ""
+ if q.LocalName {
+ s = " LocalName: true"
+ }
+ return fmt.Sprintf("%*s%sQualified:%s\n%s\n%s", indent, "", field,
+ s, q.Scope.goString(indent+2, "Scope: "),
+ q.Name.goString(indent+2, "Name: "))
+}
+
+// Template is a template with arguments.
+type Template struct {
+ Name AST
+ Args []AST
+}
+
+func (t *Template) print(ps *printState) {
+ // Inner types apply to the template as a whole, they don't
+ // cross over into the template.
+ holdInner := ps.inner
+ defer func() { ps.inner = holdInner }()
+
+ ps.inner = nil
+ ps.print(t.Name)
+
+ if !ps.tparams {
+ // Do not print template parameters.
+ return
+ }
+ // We need an extra space after operator<.
+ if ps.last == '<' {
+ ps.writeByte(' ')
+ }
+
+ ps.writeByte('<')
+ first := true
+ for _, a := range t.Args {
+ if ps.isEmpty(a) {
+ continue
+ }
+ if !first {
+ ps.writeString(", ")
+ }
+ ps.print(a)
+ first = false
+ }
+ if ps.last == '>' {
+ // Avoid syntactic ambiguity in old versions of C++.
+ ps.writeByte(' ')
+ }
+ ps.writeByte('>')
+}
+
+func (t *Template) Traverse(fn func(AST) bool) {
+ if fn(t) {
+ t.Name.Traverse(fn)
+ for _, a := range t.Args {
+ a.Traverse(fn)
+ }
+ }
+}
+
+func (t *Template) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(t) {
+ return nil
+ }
+ name := t.Name.Copy(fn, skip)
+ changed := name != nil
+ args := make([]AST, len(t.Args))
+ for i, a := range t.Args {
+ ac := a.Copy(fn, skip)
+ if ac == nil {
+ args[i] = a
+ } else {
+ args[i] = ac
+ changed = true
+ }
+ }
+ if !changed {
+ return fn(t)
+ }
+ if name == nil {
+ name = t.Name
+ }
+ t = &Template{Name: name, Args: args}
+ if r := fn(t); r != nil {
+ return r
+ }
+ return t
+}
+
+func (t *Template) GoString() string {
+ return t.goString(0, "")
+}
+
+func (t *Template) goString(indent int, field string) string {
+ var args string
+ if len(t.Args) == 0 {
+ args = fmt.Sprintf("%*sArgs: nil", indent+2, "")
+ } else {
+ args = fmt.Sprintf("%*sArgs:", indent+2, "")
+ for i, a := range t.Args {
+ args += "\n"
+ args += a.goString(indent+4, fmt.Sprintf("%d: ", i))
+ }
+ }
+ return fmt.Sprintf("%*s%sTemplate (%p):\n%s\n%s", indent, "", field, t,
+ t.Name.goString(indent+2, "Name: "), args)
+}
+
+// TemplateParam is a template parameter. The Template field is
+// filled in while parsing the demangled string. We don't normally
+// see these while printing--they are replaced by the simplify
+// function.
+type TemplateParam struct {
+ Index int
+ Template *Template
+}
+
+func (tp *TemplateParam) print(ps *printState) {
+ if tp.Template == nil {
+ panic("TemplateParam Template field is nil")
+ }
+ if tp.Index >= len(tp.Template.Args) {
+ panic("TemplateParam Index out of bounds")
+ }
+ ps.print(tp.Template.Args[tp.Index])
+}
+
+func (tp *TemplateParam) Traverse(fn func(AST) bool) {
+ fn(tp)
+ // Don't traverse Template--it points elsewhere in the AST.
+}
+
+func (tp *TemplateParam) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(tp) {
+ return nil
+ }
+ return fn(tp)
+}
+
+func (tp *TemplateParam) GoString() string {
+ return tp.goString(0, "")
+}
+
+func (tp *TemplateParam) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sTemplateParam: Template: %p; Index %d", indent, "", field, tp.Template, tp.Index)
+}
+
+// LambdaAuto is a lambda auto parameter.
+type LambdaAuto struct {
+ Index int
+}
+
+func (la *LambdaAuto) print(ps *printState) {
+ // We print the index plus 1 because that is what the standard
+ // demangler does.
+ fmt.Fprintf(&ps.buf, "auto:%d", la.Index+1)
+}
+
+func (la *LambdaAuto) Traverse(fn func(AST) bool) {
+ fn(la)
+}
+
+func (la *LambdaAuto) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(la) {
+ return nil
+ }
+ return fn(la)
+}
+
+func (la *LambdaAuto) GoString() string {
+ return la.goString(0, "")
+}
+
+func (la *LambdaAuto) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sLambdaAuto: Index %d", indent, "", field, la.Index)
+}
+
+// Qualifiers is an ordered list of type qualifiers.
+type Qualifiers struct {
+ Qualifiers []AST
+}
+
+func (qs *Qualifiers) print(ps *printState) {
+ first := true
+ for _, q := range qs.Qualifiers {
+ if !first {
+ ps.writeByte(' ')
+ }
+ q.print(ps)
+ first = false
+ }
+}
+
+func (qs *Qualifiers) Traverse(fn func(AST) bool) {
+ if fn(qs) {
+ for _, q := range qs.Qualifiers {
+ q.Traverse(fn)
+ }
+ }
+}
+
+func (qs *Qualifiers) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(qs) {
+ return nil
+ }
+ changed := false
+ qualifiers := make([]AST, len(qs.Qualifiers))
+ for i, q := range qs.Qualifiers {
+ qc := q.Copy(fn, skip)
+ if qc == nil {
+ qualifiers[i] = q
+ } else {
+ qualifiers[i] = qc
+ changed = true
+ }
+ }
+ if !changed {
+ return fn(qs)
+ }
+ qs = &Qualifiers{Qualifiers: qualifiers}
+ if r := fn(qs); r != nil {
+ return r
+ }
+ return qs
+}
+
+func (qs *Qualifiers) GoString() string {
+ return qs.goString(0, "")
+}
+
+func (qs *Qualifiers) goString(indent int, field string) string {
+ quals := fmt.Sprintf("%*s%s", indent, "", field)
+ for _, q := range qs.Qualifiers {
+ quals += "\n"
+ quals += q.goString(indent+2, "")
+ }
+ return quals
+}
+
+// Qualifier is a single type qualifier.
+type Qualifier struct {
+ Name string // qualifier name: const, volatile, etc.
+ Exprs []AST // can be non-nil for noexcept and throw
+}
+
+func (q *Qualifier) print(ps *printState) {
+ ps.writeString(q.Name)
+ if len(q.Exprs) > 0 {
+ ps.writeByte('(')
+ first := true
+ for _, e := range q.Exprs {
+ if !first {
+ ps.writeString(", ")
+ }
+ ps.print(e)
+ first = false
+ }
+ ps.writeByte(')')
+ }
+}
+
+func (q *Qualifier) Traverse(fn func(AST) bool) {
+ if fn(q) {
+ for _, e := range q.Exprs {
+ e.Traverse(fn)
+ }
+ }
+}
+
+func (q *Qualifier) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(q) {
+ return nil
+ }
+ exprs := make([]AST, len(q.Exprs))
+ changed := false
+ for i, e := range q.Exprs {
+ ec := e.Copy(fn, skip)
+ if ec == nil {
+ exprs[i] = e
+ } else {
+ exprs[i] = ec
+ changed = true
+ }
+ }
+ if !changed {
+ return fn(q)
+ }
+ q = &Qualifier{Name: q.Name, Exprs: exprs}
+ if r := fn(q); r != nil {
+ return r
+ }
+ return q
+}
+
+func (q *Qualifier) GoString() string {
+ return q.goString(0, "Qualifier: ")
+}
+
+func (q *Qualifier) goString(indent int, field string) string {
+ qs := fmt.Sprintf("%*s%s%s", indent, "", field, q.Name)
+ if len(q.Exprs) > 0 {
+ for i, e := range q.Exprs {
+ qs += "\n"
+ qs += e.goString(indent+2, fmt.Sprintf("%d: ", i))
+ }
+ }
+ return qs
+}
+
// TypeWithQualifiers is a type with standard qualifiers.
type TypeWithQualifiers struct {
	Base       AST
	Qualifiers AST
}

// print prints the qualified type. The node is pushed onto ps.inner so
// that Base may print the qualifiers in the correct position; if Base
// did not consume the entry, the qualifiers are printed here after the
// base type and the entry is popped.
func (twq *TypeWithQualifiers) print(ps *printState) {
	// Give the base type a chance to print the inner types.
	ps.inner = append(ps.inner, twq)
	ps.print(twq.Base)
	if len(ps.inner) > 0 {
		// The qualifier wasn't printed by Base.
		ps.writeByte(' ')
		ps.print(twq.Qualifiers)
		ps.inner = ps.inner[:len(ps.inner)-1]
	}
}

// Print qualifiers as an inner type by just printing the qualifiers.
func (twq *TypeWithQualifiers) printInner(ps *printState) {
	ps.writeByte(' ')
	ps.print(twq.Qualifiers)
}

// Traverse visits twq and then its base type. Note that the
// Qualifiers child is not traversed.
func (twq *TypeWithQualifiers) Traverse(fn func(AST) bool) {
	if fn(twq) {
		twq.Base.Traverse(fn)
	}
}

// Copy returns nil when twq is skipped; when neither child was
// rewritten it delegates to fn(twq); otherwise it builds a new node
// from the rewritten children (keeping originals for unchanged ones)
// and offers it to fn.
func (twq *TypeWithQualifiers) Copy(fn func(AST) AST, skip func(AST) bool) AST {
	if skip(twq) {
		return nil
	}
	base := twq.Base.Copy(fn, skip)
	quals := twq.Qualifiers.Copy(fn, skip)
	if base == nil && quals == nil {
		return fn(twq)
	}
	if base == nil {
		base = twq.Base
	}
	if quals == nil {
		quals = twq.Qualifiers
	}
	twq = &TypeWithQualifiers{Base: base, Qualifiers: quals}
	if r := fn(twq); r != nil {
		return r
	}
	return twq
}

// GoString returns a Go-syntax dump of twq for debugging.
func (twq *TypeWithQualifiers) GoString() string {
	return twq.goString(0, "")
}

// goString formats twq at the given indent with a field prefix.
func (twq *TypeWithQualifiers) goString(indent int, field string) string {
	return fmt.Sprintf("%*s%sTypeWithQualifiers:\n%s\n%s", indent, "", field,
		twq.Qualifiers.goString(indent+2, "Qualifiers: "),
		twq.Base.goString(indent+2, "Base: "))
}
+
// MethodWithQualifiers is a method with qualifiers.
type MethodWithQualifiers struct {
	Method       AST
	Qualifiers   AST
	RefQualifier string // "" or "&" or "&&"
}

// print prints the qualified method. The node is pushed onto ps.inner
// so that Method may place the qualifiers; if Method did not consume
// the entry, the qualifiers and ref-qualifier are printed here and the
// entry is popped.
func (mwq *MethodWithQualifiers) print(ps *printState) {
	// Give the base type a chance to print the inner types.
	ps.inner = append(ps.inner, mwq)
	ps.print(mwq.Method)
	if len(ps.inner) > 0 {
		if mwq.Qualifiers != nil {
			ps.writeByte(' ')
			ps.print(mwq.Qualifiers)
		}
		if mwq.RefQualifier != "" {
			ps.writeByte(' ')
			ps.writeString(mwq.RefQualifier)
		}
		ps.inner = ps.inner[:len(ps.inner)-1]
	}
}

// printInner prints the optional qualifiers and ref-qualifier when mwq
// is consumed as an inner type.
func (mwq *MethodWithQualifiers) printInner(ps *printState) {
	if mwq.Qualifiers != nil {
		ps.writeByte(' ')
		ps.print(mwq.Qualifiers)
	}
	if mwq.RefQualifier != "" {
		ps.writeByte(' ')
		ps.writeString(mwq.RefQualifier)
	}
}

// Traverse visits mwq and then its method. Note that the Qualifiers
// child is not traversed.
func (mwq *MethodWithQualifiers) Traverse(fn func(AST) bool) {
	if fn(mwq) {
		mwq.Method.Traverse(fn)
	}
}

// Copy returns nil when mwq is skipped; when neither child was
// rewritten it delegates to fn(mwq); otherwise it builds a new node
// carrying the same RefQualifier and offers it to fn.
func (mwq *MethodWithQualifiers) Copy(fn func(AST) AST, skip func(AST) bool) AST {
	if skip(mwq) {
		return nil
	}
	method := mwq.Method.Copy(fn, skip)
	var quals AST
	if mwq.Qualifiers != nil {
		quals = mwq.Qualifiers.Copy(fn, skip)
	}
	if method == nil && quals == nil {
		return fn(mwq)
	}
	if method == nil {
		method = mwq.Method
	}
	if quals == nil {
		quals = mwq.Qualifiers
	}
	mwq = &MethodWithQualifiers{Method: method, Qualifiers: quals, RefQualifier: mwq.RefQualifier}
	if r := fn(mwq); r != nil {
		return r
	}
	return mwq
}

// GoString returns a Go-syntax dump of mwq for debugging.
func (mwq *MethodWithQualifiers) GoString() string {
	return mwq.goString(0, "")
}

// goString formats mwq at the given indent, including only the
// optional fields that are present.
func (mwq *MethodWithQualifiers) goString(indent int, field string) string {
	var q string
	if mwq.Qualifiers != nil {
		q += "\n" + mwq.Qualifiers.goString(indent+2, "Qualifiers: ")
	}
	if mwq.RefQualifier != "" {
		if q != "" {
			q += "\n"
		}
		q += fmt.Sprintf("%*s%s%s", indent+2, "", "RefQualifier: ", mwq.RefQualifier)
	}
	return fmt.Sprintf("%*s%sMethodWithQualifiers:%s\n%s", indent, "", field,
		q, mwq.Method.goString(indent+2, "Method: "))
}
+
+// BuiltinType is a builtin type, like "int".
+type BuiltinType struct {
+ Name string
+}
+
+func (bt *BuiltinType) print(ps *printState) {
+ ps.writeString(bt.Name)
+}
+
+func (bt *BuiltinType) Traverse(fn func(AST) bool) {
+ fn(bt)
+}
+
+func (bt *BuiltinType) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(bt) {
+ return nil
+ }
+ return fn(bt)
+}
+
+func (bt *BuiltinType) GoString() string {
+ return bt.goString(0, "")
+}
+
+func (bt *BuiltinType) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sBuiltinType: %s", indent, "", field, bt.Name)
+}
+
// printBase is common print code for types that are printed with a
// simple suffix. It pushes qual onto ps.inner so that base may print
// the suffix in the right position; if base did not consume the
// entry, qual's printInner is invoked here and the entry is popped.
func printBase(ps *printState, qual, base AST) {
	ps.inner = append(ps.inner, qual)
	ps.print(base)
	if len(ps.inner) > 0 {
		// base did not print the pending inner type itself.
		qual.(innerPrinter).printInner(ps)
		ps.inner = ps.inner[:len(ps.inner)-1]
	}
}
+
+// PointerType is a pointer type.
+type PointerType struct {
+ Base AST
+}
+
+func (pt *PointerType) print(ps *printState) {
+ printBase(ps, pt, pt.Base)
+}
+
+func (pt *PointerType) printInner(ps *printState) {
+ ps.writeString("*")
+}
+
+func (pt *PointerType) Traverse(fn func(AST) bool) {
+ if fn(pt) {
+ pt.Base.Traverse(fn)
+ }
+}
+
+func (pt *PointerType) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(pt) {
+ return nil
+ }
+ base := pt.Base.Copy(fn, skip)
+ if base == nil {
+ return fn(pt)
+ }
+ pt = &PointerType{Base: base}
+ if r := fn(pt); r != nil {
+ return r
+ }
+ return pt
+}
+
+func (pt *PointerType) GoString() string {
+ return pt.goString(0, "")
+}
+
+func (pt *PointerType) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sPointerType:\n%s", indent, "", field,
+ pt.Base.goString(indent+2, ""))
+}
+
+// ReferenceType is a reference type.
+type ReferenceType struct {
+ Base AST
+}
+
+func (rt *ReferenceType) print(ps *printState) {
+ printBase(ps, rt, rt.Base)
+}
+
+func (rt *ReferenceType) printInner(ps *printState) {
+ ps.writeString("&")
+}
+
+func (rt *ReferenceType) Traverse(fn func(AST) bool) {
+ if fn(rt) {
+ rt.Base.Traverse(fn)
+ }
+}
+
+func (rt *ReferenceType) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(rt) {
+ return nil
+ }
+ base := rt.Base.Copy(fn, skip)
+ if base == nil {
+ return fn(rt)
+ }
+ rt = &ReferenceType{Base: base}
+ if r := fn(rt); r != nil {
+ return r
+ }
+ return rt
+}
+
+func (rt *ReferenceType) GoString() string {
+ return rt.goString(0, "")
+}
+
+func (rt *ReferenceType) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sReferenceType:\n%s", indent, "", field,
+ rt.Base.goString(indent+2, ""))
+}
+
+// RvalueReferenceType is an rvalue reference type.
+type RvalueReferenceType struct {
+ Base AST
+}
+
+func (rt *RvalueReferenceType) print(ps *printState) {
+ printBase(ps, rt, rt.Base)
+}
+
+func (rt *RvalueReferenceType) printInner(ps *printState) {
+ ps.writeString("&&")
+}
+
+func (rt *RvalueReferenceType) Traverse(fn func(AST) bool) {
+ if fn(rt) {
+ rt.Base.Traverse(fn)
+ }
+}
+
+func (rt *RvalueReferenceType) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(rt) {
+ return nil
+ }
+ base := rt.Base.Copy(fn, skip)
+ if base == nil {
+ return fn(rt)
+ }
+ rt = &RvalueReferenceType{Base: base}
+ if r := fn(rt); r != nil {
+ return r
+ }
+ return rt
+}
+
+func (rt *RvalueReferenceType) GoString() string {
+ return rt.goString(0, "")
+}
+
+func (rt *RvalueReferenceType) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sRvalueReferenceType:\n%s", indent, "", field,
+ rt.Base.goString(indent+2, ""))
+}
+
+// ComplexType is a complex type.
+type ComplexType struct {
+ Base AST
+}
+
+func (ct *ComplexType) print(ps *printState) {
+ printBase(ps, ct, ct.Base)
+}
+
+func (ct *ComplexType) printInner(ps *printState) {
+ ps.writeString(" _Complex")
+}
+
+func (ct *ComplexType) Traverse(fn func(AST) bool) {
+ if fn(ct) {
+ ct.Base.Traverse(fn)
+ }
+}
+
+func (ct *ComplexType) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(ct) {
+ return nil
+ }
+ base := ct.Base.Copy(fn, skip)
+ if base == nil {
+ return fn(ct)
+ }
+ ct = &ComplexType{Base: base}
+ if r := fn(ct); r != nil {
+ return r
+ }
+ return ct
+}
+
+func (ct *ComplexType) GoString() string {
+ return ct.goString(0, "")
+}
+
+func (ct *ComplexType) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sComplexType:\n%s", indent, "", field,
+ ct.Base.goString(indent+2, ""))
+}
+
+// ImaginaryType is an imaginary type.
+type ImaginaryType struct {
+ Base AST
+}
+
+func (it *ImaginaryType) print(ps *printState) {
+ printBase(ps, it, it.Base)
+}
+
+func (it *ImaginaryType) printInner(ps *printState) {
+ ps.writeString(" _Imaginary")
+}
+
+func (it *ImaginaryType) Traverse(fn func(AST) bool) {
+ if fn(it) {
+ it.Base.Traverse(fn)
+ }
+}
+
+func (it *ImaginaryType) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(it) {
+ return nil
+ }
+ base := it.Base.Copy(fn, skip)
+ if base == nil {
+ return fn(it)
+ }
+ it = &ImaginaryType{Base: base}
+ if r := fn(it); r != nil {
+ return r
+ }
+ return it
+}
+
+func (it *ImaginaryType) GoString() string {
+ return it.goString(0, "")
+}
+
+func (it *ImaginaryType) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sImaginaryType:\n%s", indent, "", field,
+ it.Base.goString(indent+2, ""))
+}
+
+// VendorQualifier is a type qualified by a vendor-specific qualifier.
+type VendorQualifier struct {
+ Qualifier AST
+ Type AST
+}
+
+func (vq *VendorQualifier) print(ps *printState) {
+ ps.inner = append(ps.inner, vq)
+ ps.print(vq.Type)
+ if len(ps.inner) > 0 {
+ ps.printOneInner(nil)
+ }
+}
+
+func (vq *VendorQualifier) printInner(ps *printState) {
+ ps.writeByte(' ')
+ ps.print(vq.Qualifier)
+}
+
+func (vq *VendorQualifier) Traverse(fn func(AST) bool) {
+ if fn(vq) {
+ vq.Qualifier.Traverse(fn)
+ vq.Type.Traverse(fn)
+ }
+}
+
+func (vq *VendorQualifier) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(vq) {
+ return nil
+ }
+ qualifier := vq.Qualifier.Copy(fn, skip)
+ typ := vq.Type.Copy(fn, skip)
+ if qualifier == nil && typ == nil {
+ return fn(vq)
+ }
+ if qualifier == nil {
+ qualifier = vq.Qualifier
+ }
+ if typ == nil {
+ typ = vq.Type
+ }
+ vq = &VendorQualifier{Qualifier: qualifier, Type: vq.Type}
+ if r := fn(vq); r != nil {
+ return r
+ }
+ return vq
+}
+
+func (vq *VendorQualifier) GoString() string {
+ return vq.goString(0, "")
+}
+
+func (vq *VendorQualifier) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sVendorQualifier:\n%s\n%s", indent, "", field,
+ vq.Qualifier.goString(indent+2, "Qualifier: "),
+ vq.Type.goString(indent+2, "Type: "))
+}
+
// ArrayType is an array type.
type ArrayType struct {
	Dimension AST
	Element   AST
}

// print prints the array type, element type first, then dimension.
func (at *ArrayType) print(ps *printState) {
	// Pass the array type down as an inner type so that we print
	// multi-dimensional arrays correctly.
	ps.inner = append(ps.inner, at)
	ps.print(at.Element)
	if ln := len(ps.inner); ln > 0 {
		// The element did not consume the entry; pop it and
		// print the dimension here.
		ps.inner = ps.inner[:ln-1]
		at.printDimension(ps)
	}
}

// printInner prints the dimension when at is consumed as an inner type.
func (at *ArrayType) printInner(ps *printState) {
	at.printDimension(ps)
}

// Print the array dimension. Any inner types still pending on
// ps.inner are flushed first: nested array types are folded into a
// multi-dimensional form, anything else is wrapped in parentheses.
func (at *ArrayType) printDimension(ps *printState) {
	space := " "
	for len(ps.inner) > 0 {
		// We haven't gotten to the real type yet.  Use
		// parentheses around that type, except that if it is
		// an array type we print it as a multi-dimensional
		// array
		in := ps.inner[len(ps.inner)-1]
		if twq, ok := in.(*TypeWithQualifiers); ok {
			// Look through qualifiers to find a nested array.
			in = twq.Base
		}
		if _, ok := in.(*ArrayType); ok {
			if in == ps.inner[len(ps.inner)-1] {
				// Direct nesting: no separating space
				// between the dimensions.
				space = ""
			}
			ps.printOneInner(nil)
		} else {
			ps.writeString(" (")
			ps.printInner(false)
			ps.writeByte(')')
		}
	}
	ps.writeString(space)
	ps.writeByte('[')
	ps.print(at.Dimension)
	ps.writeByte(']')
}

// Traverse visits at, its dimension, and its element type.
func (at *ArrayType) Traverse(fn func(AST) bool) {
	if fn(at) {
		at.Dimension.Traverse(fn)
		at.Element.Traverse(fn)
	}
}

// Copy returns nil when at is skipped; when neither child was
// rewritten it delegates to fn(at); otherwise it builds a new node
// from the rewritten children and offers it to fn.
func (at *ArrayType) Copy(fn func(AST) AST, skip func(AST) bool) AST {
	if skip(at) {
		return nil
	}
	dimension := at.Dimension.Copy(fn, skip)
	element := at.Element.Copy(fn, skip)
	if dimension == nil && element == nil {
		return fn(at)
	}
	if dimension == nil {
		dimension = at.Dimension
	}
	if element == nil {
		element = at.Element
	}
	at = &ArrayType{Dimension: dimension, Element: element}
	if r := fn(at); r != nil {
		return r
	}
	return at
}

// GoString returns a Go-syntax dump of at for debugging.
func (at *ArrayType) GoString() string {
	return at.goString(0, "")
}

// goString formats at at the given indent with a field prefix.
func (at *ArrayType) goString(indent int, field string) string {
	return fmt.Sprintf("%*s%sArrayType:\n%s\n%s", indent, "", field,
		at.Dimension.goString(indent+2, "Dimension: "),
		at.Element.goString(indent+2, "Element: "))
}
+
// FunctionType is a function type. The Return field may be nil for
// cases where the return type is not part of the mangled name.
type FunctionType struct {
	Return AST
	Args   []AST
}

// print prints the function type. If there is a return type it is
// printed first with ft pushed on ps.inner, so that the argument list
// can be placed inside the return type if the return type consumes
// the entry.
func (ft *FunctionType) print(ps *printState) {
	if ft.Return != nil {
		// Pass the return type as an inner type in order to
		// print the arguments in the right location.
		ps.inner = append(ps.inner, ft)
		ps.print(ft.Return)
		if len(ps.inner) == 0 {
			// Everything was printed.
			return
		}
		ps.inner = ps.inner[:len(ps.inner)-1]
		ps.writeByte(' ')
	}
	ft.printArgs(ps)
}

// printInner prints the argument list when ft is consumed as an inner
// type.
func (ft *FunctionType) printInner(ps *printState) {
	ft.printArgs(ps)
}

// printArgs prints the arguments of a function type. It looks at the
// inner types for spacing.
func (ft *FunctionType) printArgs(ps *printState) {
	// Decide whether the pending inner types require a
	// parenthesized group before the argument list, and whether a
	// space is needed before that group.
	paren := false
	space := false
	for i := len(ps.inner) - 1; i >= 0; i-- {
		switch ps.inner[i].(type) {
		case *PointerType, *ReferenceType, *RvalueReferenceType:
			paren = true
		case *TypeWithQualifiers, *ComplexType, *ImaginaryType, *PtrMem:
			space = true
			paren = true
		}
		if paren {
			break
		}
	}

	if paren {
		// Avoid a space right after '(' or '*'.
		if !space && (ps.last != '(' && ps.last != '*') {
			space = true
		}
		if space && ps.last != ' ' {
			ps.writeByte(' ')
		}
		ps.writeByte('(')
	}

	// Flush pending inner types inside the parentheses, saving the
	// list so it can be restored after the argument list.
	save := ps.printInner(true)

	if paren {
		ps.writeByte(')')
	}

	ps.writeByte('(')
	first := true
	for _, a := range ft.Args {
		// Skip arguments that print as nothing (e.g. expanded
		// to empty by ps.isEmpty).
		if ps.isEmpty(a) {
			continue
		}
		if !first {
			ps.writeString(", ")
		}
		ps.print(a)
		first = false
	}
	ps.writeByte(')')

	ps.inner = save
	ps.printInner(false)
}

// Traverse visits ft, its return type if any, and each argument.
func (ft *FunctionType) Traverse(fn func(AST) bool) {
	if fn(ft) {
		if ft.Return != nil {
			ft.Return.Traverse(fn)
		}
		for _, a := range ft.Args {
			a.Traverse(fn)
		}
	}
}

// Copy returns nil when ft is skipped; when no child was rewritten it
// delegates to fn(ft); otherwise it builds a new node from the
// rewritten children and offers it to fn.
func (ft *FunctionType) Copy(fn func(AST) AST, skip func(AST) bool) AST {
	if skip(ft) {
		return nil
	}
	changed := false
	var ret AST
	if ft.Return != nil {
		ret = ft.Return.Copy(fn, skip)
		if ret == nil {
			ret = ft.Return
		} else {
			changed = true
		}
	}
	args := make([]AST, len(ft.Args))
	for i, a := range ft.Args {
		ac := a.Copy(fn, skip)
		if ac == nil {
			args[i] = a
		} else {
			args[i] = ac
			changed = true
		}
	}
	if !changed {
		return fn(ft)
	}
	ft = &FunctionType{Return: ret, Args: args}
	if r := fn(ft); r != nil {
		return r
	}
	return ft
}

// GoString returns a Go-syntax dump of ft for debugging.
func (ft *FunctionType) GoString() string {
	return ft.goString(0, "")
}

// goString formats ft at the given indent, spelling out nil return
// and empty argument lists explicitly.
func (ft *FunctionType) goString(indent int, field string) string {
	var r string
	if ft.Return == nil {
		r = fmt.Sprintf("%*sReturn: nil", indent+2, "")
	} else {
		r = ft.Return.goString(indent+2, "Return: ")
	}
	var args string
	if len(ft.Args) == 0 {
		args = fmt.Sprintf("%*sArgs: nil", indent+2, "")
	} else {
		args = fmt.Sprintf("%*sArgs:", indent+2, "")
		for i, a := range ft.Args {
			args += "\n"
			args += a.goString(indent+4, fmt.Sprintf("%d: ", i))
		}
	}
	return fmt.Sprintf("%*s%sFunctionType:\n%s\n%s", indent, "", field, r, args)
}
+
+// FunctionParam is a parameter of a function, used for last-specified
+// return type in a closure.
+type FunctionParam struct {
+ Index int
+}
+
+func (fp *FunctionParam) print(ps *printState) {
+ if fp.Index == 0 {
+ ps.writeString("this")
+ } else {
+ fmt.Fprintf(&ps.buf, "{parm#%d}", fp.Index)
+ }
+}
+
+func (fp *FunctionParam) Traverse(fn func(AST) bool) {
+ fn(fp)
+}
+
+func (fp *FunctionParam) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(fp) {
+ return nil
+ }
+ return fn(fp)
+}
+
+func (fp *FunctionParam) GoString() string {
+ return fp.goString(0, "")
+}
+
+func (fp *FunctionParam) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sFunctionParam: %d", indent, "", field, fp.Index)
+}
+
// PtrMem is a pointer-to-member expression.
type PtrMem struct {
	Class  AST
	Member AST
}

// print prints the member type with pm pushed on ps.inner so the
// "Class::*" part lands in the right position; if the member did not
// consume the entry it is printed (and popped) here.
func (pm *PtrMem) print(ps *printState) {
	ps.inner = append(ps.inner, pm)
	ps.print(pm.Member)
	if len(ps.inner) > 0 {
		ps.printOneInner(nil)
	}
}

// printInner prints "Class::*", preceded by a space unless the last
// byte written was an opening parenthesis.
func (pm *PtrMem) printInner(ps *printState) {
	if ps.last != '(' {
		ps.writeByte(' ')
	}
	ps.print(pm.Class)
	ps.writeString("::*")
}

// Traverse visits pm, its class, and its member.
func (pm *PtrMem) Traverse(fn func(AST) bool) {
	if fn(pm) {
		pm.Class.Traverse(fn)
		pm.Member.Traverse(fn)
	}
}

// Copy returns nil when pm is skipped; when neither child was
// rewritten it delegates to fn(pm); otherwise it builds a new node
// from the rewritten children and offers it to fn.
func (pm *PtrMem) Copy(fn func(AST) AST, skip func(AST) bool) AST {
	if skip(pm) {
		return nil
	}
	class := pm.Class.Copy(fn, skip)
	member := pm.Member.Copy(fn, skip)
	if class == nil && member == nil {
		return fn(pm)
	}
	if class == nil {
		class = pm.Class
	}
	if member == nil {
		member = pm.Member
	}
	pm = &PtrMem{Class: class, Member: member}
	if r := fn(pm); r != nil {
		return r
	}
	return pm
}

// GoString returns a Go-syntax dump of pm for debugging.
func (pm *PtrMem) GoString() string {
	return pm.goString(0, "")
}

// goString formats pm at the given indent with a field prefix.
func (pm *PtrMem) goString(indent int, field string) string {
	return fmt.Sprintf("%*s%sPtrMem:\n%s\n%s", indent, "", field,
		pm.Class.goString(indent+2, "Class: "),
		pm.Member.goString(indent+2, "Member: "))
}
+
+// FixedType is a fixed numeric type of unknown size.
+type FixedType struct {
+ Base AST
+ Accum bool
+ Sat bool
+}
+
+func (ft *FixedType) print(ps *printState) {
+ if ft.Sat {
+ ps.writeString("_Sat ")
+ }
+ if bt, ok := ft.Base.(*BuiltinType); ok && bt.Name == "int" {
+ // The standard demangler skips printing "int".
+ } else {
+ ps.print(ft.Base)
+ ps.writeByte(' ')
+ }
+ if ft.Accum {
+ ps.writeString("_Accum")
+ } else {
+ ps.writeString("_Fract")
+ }
+}
+
+func (ft *FixedType) Traverse(fn func(AST) bool) {
+ if fn(ft) {
+ ft.Base.Traverse(fn)
+ }
+}
+
+func (ft *FixedType) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(ft) {
+ return nil
+ }
+ base := ft.Base.Copy(fn, skip)
+ if base == nil {
+ return fn(ft)
+ }
+ ft = &FixedType{Base: base, Accum: ft.Accum, Sat: ft.Sat}
+ if r := fn(ft); r != nil {
+ return r
+ }
+ return ft
+}
+
+func (ft *FixedType) GoString() string {
+ return ft.goString(0, "")
+}
+
+func (ft *FixedType) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sFixedType: Accum: %t; Sat: %t\n%s", indent, "", field,
+ ft.Accum, ft.Sat,
+ ft.Base.goString(indent+2, "Base: "))
+}
+
// VectorType is a vector type.
type VectorType struct {
	Dimension AST
	Base      AST
}

// print prints the base type with vt pushed on ps.inner so the
// "__vector(N)" suffix lands in the right position; if the base did
// not consume the entry it is printed (and popped) here.
func (vt *VectorType) print(ps *printState) {
	ps.inner = append(ps.inner, vt)
	ps.print(vt.Base)
	if len(ps.inner) > 0 {
		ps.printOneInner(nil)
	}
}

// printInner prints the " __vector(Dimension)" suffix.
func (vt *VectorType) printInner(ps *printState) {
	ps.writeString(" __vector(")
	ps.print(vt.Dimension)
	ps.writeByte(')')
}

// Traverse visits vt, its dimension, and its base type.
func (vt *VectorType) Traverse(fn func(AST) bool) {
	if fn(vt) {
		vt.Dimension.Traverse(fn)
		vt.Base.Traverse(fn)
	}
}

// Copy returns nil when vt is skipped; when neither child was
// rewritten it delegates to fn(vt); otherwise it builds a new node
// from the rewritten children and offers it to fn.
func (vt *VectorType) Copy(fn func(AST) AST, skip func(AST) bool) AST {
	if skip(vt) {
		return nil
	}
	dimension := vt.Dimension.Copy(fn, skip)
	base := vt.Base.Copy(fn, skip)
	if dimension == nil && base == nil {
		return fn(vt)
	}
	if dimension == nil {
		dimension = vt.Dimension
	}
	if base == nil {
		base = vt.Base
	}
	vt = &VectorType{Dimension: dimension, Base: base}
	if r := fn(vt); r != nil {
		return r
	}
	return vt
}

// GoString returns a Go-syntax dump of vt for debugging.
func (vt *VectorType) GoString() string {
	return vt.goString(0, "")
}

// goString formats vt at the given indent with a field prefix.
func (vt *VectorType) goString(indent int, field string) string {
	return fmt.Sprintf("%*s%sVectorType:\n%s\n%s", indent, "", field,
		vt.Dimension.goString(indent+2, "Dimension: "),
		vt.Base.goString(indent+2, "Base: "))
}
+
+// Decltype is the decltype operator.
+type Decltype struct {
+ Expr AST
+}
+
+func (dt *Decltype) print(ps *printState) {
+ ps.writeString("decltype (")
+ ps.print(dt.Expr)
+ ps.writeByte(')')
+}
+
+func (dt *Decltype) Traverse(fn func(AST) bool) {
+ if fn(dt) {
+ dt.Expr.Traverse(fn)
+ }
+}
+
+func (dt *Decltype) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(dt) {
+ return nil
+ }
+ expr := dt.Expr.Copy(fn, skip)
+ if expr == nil {
+ return fn(dt)
+ }
+ dt = &Decltype{Expr: expr}
+ if r := fn(dt); r != nil {
+ return r
+ }
+ return dt
+}
+
+func (dt *Decltype) GoString() string {
+ return dt.goString(0, "")
+}
+
+func (dt *Decltype) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sDecltype:\n%s", indent, "", field,
+ dt.Expr.goString(indent+2, "Expr: "))
+}
+
+// Operator is an operator.
+type Operator struct {
+ Name string
+}
+
+func (op *Operator) print(ps *printState) {
+ ps.writeString("operator")
+ if isLower(op.Name[0]) {
+ ps.writeByte(' ')
+ }
+ n := op.Name
+ n = strings.TrimSuffix(n, " ")
+ ps.writeString(n)
+}
+
+func (op *Operator) Traverse(fn func(AST) bool) {
+ fn(op)
+}
+
+func (op *Operator) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(op) {
+ return nil
+ }
+ return fn(op)
+}
+
+func (op *Operator) GoString() string {
+ return op.goString(0, "")
+}
+
+func (op *Operator) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sOperator: %s", indent, "", field, op.Name)
+}
+
+// Constructor is a constructor.
+type Constructor struct {
+ Name AST
+}
+
+func (c *Constructor) print(ps *printState) {
+ ps.print(c.Name)
+}
+
+func (c *Constructor) Traverse(fn func(AST) bool) {
+ if fn(c) {
+ c.Name.Traverse(fn)
+ }
+}
+
+func (c *Constructor) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(c) {
+ return nil
+ }
+ name := c.Name.Copy(fn, skip)
+ if name == nil {
+ return fn(c)
+ }
+ c = &Constructor{Name: name}
+ if r := fn(c); r != nil {
+ return r
+ }
+ return c
+}
+
+func (c *Constructor) GoString() string {
+ return c.goString(0, "")
+}
+
+func (c *Constructor) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sConstructor:\n%s", indent, "", field, c.Name.goString(indent+2, "Name: "))
+}
+
+// Destructor is a destructor.
+type Destructor struct {
+ Name AST
+}
+
+func (d *Destructor) print(ps *printState) {
+ ps.writeByte('~')
+ ps.print(d.Name)
+}
+
+func (d *Destructor) Traverse(fn func(AST) bool) {
+ if fn(d) {
+ d.Name.Traverse(fn)
+ }
+}
+
+func (d *Destructor) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(d) {
+ return nil
+ }
+ name := d.Name.Copy(fn, skip)
+ if name == nil {
+ return fn(d)
+ }
+ d = &Destructor{Name: name}
+ if r := fn(d); r != nil {
+ return r
+ }
+ return d
+}
+
+func (d *Destructor) GoString() string {
+ return d.goString(0, "")
+}
+
+func (d *Destructor) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sDestructor:\n%s", indent, "", field, d.Name.goString(indent+2, "Name: "))
+}
+
+// GlobalCDtor is a global constructor or destructor.
+type GlobalCDtor struct {
+ Ctor bool
+ Key AST
+}
+
+func (gcd *GlobalCDtor) print(ps *printState) {
+ ps.writeString("global ")
+ if gcd.Ctor {
+ ps.writeString("constructors")
+ } else {
+ ps.writeString("destructors")
+ }
+ ps.writeString(" keyed to ")
+ ps.print(gcd.Key)
+}
+
+func (gcd *GlobalCDtor) Traverse(fn func(AST) bool) {
+ if fn(gcd) {
+ gcd.Key.Traverse(fn)
+ }
+}
+
+func (gcd *GlobalCDtor) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(gcd) {
+ return nil
+ }
+ key := gcd.Key.Copy(fn, skip)
+ if key == nil {
+ return fn(gcd)
+ }
+ gcd = &GlobalCDtor{Ctor: gcd.Ctor, Key: key}
+ if r := fn(gcd); r != nil {
+ return r
+ }
+ return gcd
+}
+
+func (gcd *GlobalCDtor) GoString() string {
+ return gcd.goString(0, "")
+}
+
+func (gcd *GlobalCDtor) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sGlobalCDtor: Ctor: %t\n%s", indent, "", field,
+ gcd.Ctor, gcd.Key.goString(indent+2, "Key: "))
+}
+
+// TaggedName is a name with an ABI tag.
+type TaggedName struct {
+ Name AST
+ Tag AST
+}
+
+func (t *TaggedName) print(ps *printState) {
+ ps.print(t.Name)
+ ps.writeString("[abi:")
+ ps.print(t.Tag)
+ ps.writeByte(']')
+}
+
+func (t *TaggedName) Traverse(fn func(AST) bool) {
+ if fn(t) {
+ t.Name.Traverse(fn)
+ t.Tag.Traverse(fn)
+ }
+}
+
+func (t *TaggedName) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(t) {
+ return nil
+ }
+ name := t.Name.Copy(fn, skip)
+ tag := t.Tag.Copy(fn, skip)
+ if name == nil && tag == nil {
+ return fn(t)
+ }
+ if name == nil {
+ name = t.Name
+ }
+ if tag == nil {
+ tag = t.Tag
+ }
+ t = &TaggedName{Name: name, Tag: tag}
+ if r := fn(t); r != nil {
+ return r
+ }
+ return t
+}
+
+func (t *TaggedName) GoString() string {
+ return t.goString(0, "")
+}
+
+func (t *TaggedName) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sTaggedName:\n%s\n%s", indent, "", field,
+ t.Name.goString(indent+2, "Name: "),
+ t.Tag.goString(indent+2, "Tag: "))
+}
+
// PackExpansion is a pack expansion. The Pack field may be nil.
type PackExpansion struct {
	Base AST
	Pack *ArgumentPack
}

// print prints the pack expansion. When Pack is nil the expansion was
// not resolved, so the base is printed with a "..." suffix; otherwise
// the base (which refers into the resolved pack) is printed directly.
func (pe *PackExpansion) print(ps *printState) {
	// We normally only get here if the simplify function was
	// unable to locate and expand the pack.
	if pe.Pack == nil {
		parenthesize(ps, pe.Base)
		ps.writeString("...")
	} else {
		ps.print(pe.Base)
	}
}

// Traverse visits pe and its base.
func (pe *PackExpansion) Traverse(fn func(AST) bool) {
	if fn(pe) {
		pe.Base.Traverse(fn)
		// Don't traverse Pack--it points elsewhere in the AST.
	}
}

// Copy returns nil when pe is skipped; when the base was not rewritten
// it delegates to fn(pe); otherwise a new node sharing the same Pack
// pointer is offered to fn. Pack itself is not copied because it
// points elsewhere in the AST.
func (pe *PackExpansion) Copy(fn func(AST) AST, skip func(AST) bool) AST {
	if skip(pe) {
		return nil
	}
	base := pe.Base.Copy(fn, skip)
	if base == nil {
		return fn(pe)
	}
	pe = &PackExpansion{Base: base, Pack: pe.Pack}
	if r := fn(pe); r != nil {
		return r
	}
	return pe
}

// GoString returns a Go-syntax dump of pe for debugging.
func (pe *PackExpansion) GoString() string {
	return pe.goString(0, "")
}

// goString formats pe at the given indent. The pack is shown only as
// a pointer, since it is shared with another part of the AST.
func (pe *PackExpansion) goString(indent int, field string) string {
	return fmt.Sprintf("%*s%sPackExpansion: Pack: %p\n%s", indent, "", field,
		pe.Pack, pe.Base.goString(indent+2, "Base: "))
}
+
+// ArgumentPack is an argument pack.
+type ArgumentPack struct {
+ Args []AST
+}
+
+func (ap *ArgumentPack) print(ps *printState) {
+ for i, a := range ap.Args {
+ if i > 0 {
+ ps.writeString(", ")
+ }
+ ps.print(a)
+ }
+}
+
+func (ap *ArgumentPack) Traverse(fn func(AST) bool) {
+ if fn(ap) {
+ for _, a := range ap.Args {
+ a.Traverse(fn)
+ }
+ }
+}
+
+func (ap *ArgumentPack) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(ap) {
+ return nil
+ }
+ args := make([]AST, len(ap.Args))
+ changed := false
+ for i, a := range ap.Args {
+ ac := a.Copy(fn, skip)
+ if ac == nil {
+ args[i] = a
+ } else {
+ args[i] = ac
+ changed = true
+ }
+ }
+ if !changed {
+ return fn(ap)
+ }
+ ap = &ArgumentPack{Args: args}
+ if r := fn(ap); r != nil {
+ return r
+ }
+ return ap
+}
+
+func (ap *ArgumentPack) GoString() string {
+ return ap.goString(0, "")
+}
+
+func (ap *ArgumentPack) goString(indent int, field string) string {
+ if len(ap.Args) == 0 {
+ return fmt.Sprintf("%*s%sArgumentPack: nil", indent, "", field)
+ }
+ s := fmt.Sprintf("%*s%sArgumentPack:", indent, "", field)
+ for i, a := range ap.Args {
+ s += "\n"
+ s += a.goString(indent+2, fmt.Sprintf("%d: ", i))
+ }
+ return s
+}
+
+// SizeofPack is the sizeof operator applied to an argument pack.
+type SizeofPack struct {
+ Pack *ArgumentPack
+}
+
+func (sp *SizeofPack) print(ps *printState) {
+ ps.writeString(fmt.Sprintf("%d", len(sp.Pack.Args)))
+}
+
+func (sp *SizeofPack) Traverse(fn func(AST) bool) {
+ fn(sp)
+ // Don't traverse the pack--it points elsewhere in the AST.
+}
+
+func (sp *SizeofPack) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(sp) {
+ return nil
+ }
+ sp = &SizeofPack{Pack: sp.Pack}
+ if r := fn(sp); r != nil {
+ return r
+ }
+ return sp
+}
+
+func (sp *SizeofPack) GoString() string {
+ return sp.goString(0, "")
+}
+
+func (sp *SizeofPack) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sSizeofPack: Pack: %p", indent, "", field, sp.Pack)
+}
+
+// SizeofArgs is the size of a captured template parameter pack from
+// an alias template.
+type SizeofArgs struct {
+ Args []AST
+}
+
+func (sa *SizeofArgs) print(ps *printState) {
+ c := 0
+ for _, a := range sa.Args {
+ if ap, ok := a.(*ArgumentPack); ok {
+ c += len(ap.Args)
+ } else if el, ok := a.(*ExprList); ok {
+ c += len(el.Exprs)
+ } else {
+ c++
+ }
+ }
+ ps.writeString(fmt.Sprintf("%d", c))
+}
+
+func (sa *SizeofArgs) Traverse(fn func(AST) bool) {
+ if fn(sa) {
+ for _, a := range sa.Args {
+ a.Traverse(fn)
+ }
+ }
+}
+
+func (sa *SizeofArgs) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(sa) {
+ return nil
+ }
+ changed := false
+ args := make([]AST, len(sa.Args))
+ for i, a := range sa.Args {
+ ac := a.Copy(fn, skip)
+ if ac == nil {
+ args[i] = a
+ } else {
+ args[i] = ac
+ changed = true
+ }
+ }
+ if !changed {
+ return fn(sa)
+ }
+ sa = &SizeofArgs{Args: args}
+ if r := fn(sa); r != nil {
+ return r
+ }
+ return sa
+}
+
+func (sa *SizeofArgs) GoString() string {
+ return sa.goString(0, "")
+}
+
+func (sa *SizeofArgs) goString(indent int, field string) string {
+ var args string
+ if len(sa.Args) == 0 {
+ args = fmt.Sprintf("%*sArgs: nil", indent+2, "")
+ } else {
+ args = fmt.Sprintf("%*sArgs:", indent+2, "")
+ for i, a := range sa.Args {
+ args += "\n"
+ args += a.goString(indent+4, fmt.Sprintf("%d: ", i))
+ }
+ }
+ return fmt.Sprintf("%*s%sSizeofArgs:\n%s", indent, "", field, args)
+}
+
+// Cast is a type cast.
+type Cast struct {
+ To AST
+}
+
+func (c *Cast) print(ps *printState) {
+ ps.writeString("operator ")
+ ps.print(c.To)
+}
+
+func (c *Cast) Traverse(fn func(AST) bool) {
+ if fn(c) {
+ c.To.Traverse(fn)
+ }
+}
+
+func (c *Cast) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(c) {
+ return nil
+ }
+ to := c.To.Copy(fn, skip)
+ if to == nil {
+ return fn(c)
+ }
+ c = &Cast{To: to}
+ if r := fn(c); r != nil {
+ return r
+ }
+ return c
+}
+
+func (c *Cast) GoString() string {
+ return c.goString(0, "")
+}
+
+func (c *Cast) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sCast\n%s", indent, "", field,
+ c.To.goString(indent+2, "To: "))
+}
+
+// The parenthesize function prints the string for val, wrapped in
+// parentheses if necessary.
+func parenthesize(ps *printState, val AST) {
+ paren := false
+ switch v := val.(type) {
+ case *Name, *InitializerList, *FunctionParam:
+ case *Qualified:
+ if v.LocalName {
+ paren = true
+ }
+ default:
+ paren = true
+ }
+ if paren {
+ ps.writeByte('(')
+ }
+ ps.print(val)
+ if paren {
+ ps.writeByte(')')
+ }
+}
+
+// Nullary is an operator in an expression with no arguments, such as
+// throw.
+type Nullary struct {
+ Op AST
+}
+
+func (n *Nullary) print(ps *printState) {
+ if op, ok := n.Op.(*Operator); ok {
+ ps.writeString(op.Name)
+ } else {
+ ps.print(n.Op)
+ }
+}
+
+func (n *Nullary) Traverse(fn func(AST) bool) {
+ if fn(n) {
+ n.Op.Traverse(fn)
+ }
+}
+
+func (n *Nullary) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+ if skip(n) {
+ return nil
+ }
+ op := n.Op.Copy(fn, skip)
+ if op == nil {
+ return fn(n)
+ }
+ n = &Nullary{Op: op}
+ if r := fn(n); r != nil {
+ return r
+ }
+ return n
+}
+
+func (n *Nullary) GoString() string {
+ return n.goString(0, "")
+}
+
+func (n *Nullary) goString(indent int, field string) string {
+ return fmt.Sprintf("%*s%sNullary:\n%s", indent, "", field,
+ n.Op.goString(indent+2, "Op: "))
+}
+
// Unary is a unary operation in an expression.
type Unary struct {
	Op         AST
	Expr       AST
	Suffix     bool // true for ++ -- when used as postfix
	SizeofType bool // true for sizeof (type)
}

// print prints the unary expression, handling several layout special
// cases: address-of a function drops the function's argument list,
// postfix operators print the operand first, casts are wrapped in
// parentheses, "::" takes its operand without parentheses, and a
// sizeof-type argument is always parenthesized.
func (u *Unary) print(ps *printState) {
	expr := u.Expr

	// Don't print the argument list when taking the address of a
	// function.
	if op, ok := u.Op.(*Operator); ok && op.Name == "&" {
		if t, ok := expr.(*Typed); ok {
			if _, ok := t.Type.(*FunctionType); ok {
				expr = t.Name
			}
		}
	}

	if u.Suffix {
		// Postfix: operand first, operator after.
		parenthesize(ps, expr)
	}

	if op, ok := u.Op.(*Operator); ok {
		ps.writeString(op.Name)
	} else if c, ok := u.Op.(*Cast); ok {
		// A cast prints as "(type)", not "operator type".
		ps.writeByte('(')
		ps.print(c.To)
		ps.writeByte(')')
	} else {
		ps.print(u.Op)
	}

	if !u.Suffix {
		if op, ok := u.Op.(*Operator); ok && op.Name == "::" {
			// Don't use parentheses after ::.
			ps.print(expr)
		} else if u.SizeofType {
			// Always use parentheses for sizeof argument.
			ps.writeByte('(')
			ps.print(expr)
			ps.writeByte(')')
		} else {
			parenthesize(ps, expr)
		}
	}
}

// Traverse visits u, its operator, and its expression.
func (u *Unary) Traverse(fn func(AST) bool) {
	if fn(u) {
		u.Op.Traverse(fn)
		u.Expr.Traverse(fn)
	}
}

// Copy returns nil when u is skipped; when neither child was rewritten
// it delegates to fn(u); otherwise a new node carrying the same Suffix
// and SizeofType flags is offered to fn.
func (u *Unary) Copy(fn func(AST) AST, skip func(AST) bool) AST {
	if skip(u) {
		return nil
	}
	op := u.Op.Copy(fn, skip)
	expr := u.Expr.Copy(fn, skip)
	if op == nil && expr == nil {
		return fn(u)
	}
	if op == nil {
		op = u.Op
	}
	if expr == nil {
		expr = u.Expr
	}
	u = &Unary{Op: op, Expr: expr, Suffix: u.Suffix, SizeofType: u.SizeofType}
	if r := fn(u); r != nil {
		return r
	}
	return u
}

// GoString returns a Go-syntax dump of u for debugging.
func (u *Unary) GoString() string {
	return u.goString(0, "")
}

// goString formats u at the given indent, including only the flags
// that are set.
func (u *Unary) goString(indent int, field string) string {
	var s string
	if u.Suffix {
		s = " Suffix: true"
	}
	if u.SizeofType {
		s += " SizeofType: true"
	}
	return fmt.Sprintf("%*s%sUnary:%s\n%s\n%s", indent, "", field,
		s, u.Op.goString(indent+2, "Op: "),
		u.Expr.goString(indent+2, "Expr: "))
}
+
+// isDesignatedInitializer reports whether x is a designated
+// initializer: a Binary whose operator is "=" (.field=) or "]="
+// ([idx]=), or a Trinary whose operator is "[...]=" (range form).
+func isDesignatedInitializer(x AST) bool {
+	switch x := x.(type) {
+	case *Binary:
+		if op, ok := x.Op.(*Operator); ok {
+			return op.Name == "=" || op.Name == "]="
+		}
+	case *Trinary:
+		if op, ok := x.Op.(*Operator); ok {
+			return op.Name == "[...]="
+		}
+	}
+	return false
+}
+
+// Binary is a binary operation in an expression.
+type Binary struct {
+	Op    AST
+	Left  AST
+	Right AST
+}
+
+// print writes the demangled form of the binary expression to ps,
+// with special cases for casts, designated initializers, function
+// calls, and array indexing.
+func (b *Binary) print(ps *printState) {
+	op, _ := b.Op.(*Operator)
+
+	// C++-style casts print as op<Left>(Right).
+	if op != nil && strings.Contains(op.Name, "cast") {
+		ps.writeString(op.Name)
+		ps.writeByte('<')
+		ps.print(b.Left)
+		ps.writeString(">(")
+		ps.print(b.Right)
+		ps.writeByte(')')
+		return
+	}
+
+	// Designated initializers: .field=value or [idx]=value.
+	// isDesignatedInitializer guarantees op is non-nil here.
+	if isDesignatedInitializer(b) {
+		if op.Name == "=" {
+			ps.writeByte('.')
+		} else {
+			ps.writeByte('[')
+		}
+		ps.print(b.Left)
+		if op.Name == "]=" {
+			ps.writeByte(']')
+		}
+		if isDesignatedInitializer(b.Right) {
+			// Don't add anything between designated
+			// initializer chains.
+			ps.print(b.Right)
+		} else {
+			ps.writeByte('=')
+			parenthesize(ps, b.Right)
+		}
+		return
+	}
+
+	// Use an extra set of parentheses around an expression that
+	// uses the greater-than operator, so that it does not get
+	// confused with the '>' that ends template parameters.
+	if op != nil && op.Name == ">" {
+		ps.writeByte('(')
+	}
+
+	left := b.Left
+
+	// For a function call in an expression, don't print the types
+	// of the arguments unless there is a return type.
+	skipParens := false
+	if op != nil && op.Name == "()" {
+		if ty, ok := b.Left.(*Typed); ok {
+			if ft, ok := ty.Type.(*FunctionType); ok {
+				if ft.Return == nil {
+					left = ty.Name
+				} else {
+					skipParens = true
+				}
+			} else {
+				left = ty.Name
+			}
+		}
+	}
+
+	if skipParens {
+		ps.print(left)
+	} else {
+		parenthesize(ps, left)
+	}
+
+	// Array indexing prints as left[right].
+	if op != nil && op.Name == "[]" {
+		ps.writeByte('[')
+		ps.print(b.Right)
+		ps.writeByte(']')
+		return
+	}
+
+	if op != nil {
+		if op.Name != "()" {
+			ps.writeString(op.Name)
+		}
+	} else {
+		ps.print(b.Op)
+	}
+
+	parenthesize(ps, b.Right)
+
+	if op != nil && op.Name == ">" {
+		ps.writeByte(')')
+	}
+}
+
+// Traverse calls fn on b and, if fn returns true, on b's children.
+func (b *Binary) Traverse(fn func(AST) bool) {
+	if fn(b) {
+		b.Op.Traverse(fn)
+		b.Left.Traverse(fn)
+		b.Right.Traverse(fn)
+	}
+}
+
+// Copy returns a copy of b with fn applied, or nil if nothing changed.
+// Subtrees for which skip returns true are not copied.
+func (b *Binary) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+	if skip(b) {
+		return nil
+	}
+	op := b.Op.Copy(fn, skip)
+	left := b.Left.Copy(fn, skip)
+	right := b.Right.Copy(fn, skip)
+	if op == nil && left == nil && right == nil {
+		return fn(b)
+	}
+	if op == nil {
+		op = b.Op
+	}
+	if left == nil {
+		left = b.Left
+	}
+	if right == nil {
+		right = b.Right
+	}
+	b = &Binary{Op: op, Left: left, Right: right}
+	if r := fn(b); r != nil {
+		return r
+	}
+	return b
+}
+
+// GoString returns a verbose Go-syntax representation of b, for debugging.
+func (b *Binary) GoString() string {
+	return b.goString(0, "")
+}
+
+// goString formats b at the given indentation, prefixed by field.
+func (b *Binary) goString(indent int, field string) string {
+	return fmt.Sprintf("%*s%sBinary:\n%s\n%s\n%s", indent, "", field,
+		b.Op.goString(indent+2, "Op: "),
+		b.Left.goString(indent+2, "Left: "),
+		b.Right.goString(indent+2, "Right: "))
+}
+
+// Trinary is the ?: trinary operation in an expression.
+type Trinary struct {
+	Op     AST
+	First  AST
+	Second AST
+	Third  AST
+}
+
+// print writes either a range designated initializer
+// ([first ... second]=third) or a conditional expression to ps.
+func (t *Trinary) print(ps *printState) {
+	if isDesignatedInitializer(t) {
+		ps.writeByte('[')
+		ps.print(t.First)
+		ps.writeString(" ... ")
+		ps.print(t.Second)
+		ps.writeByte(']')
+		if isDesignatedInitializer(t.Third) {
+			// Don't add anything between designated
+			// initializer chains.
+			ps.print(t.Third)
+		} else {
+			ps.writeByte('=')
+			parenthesize(ps, t.Third)
+		}
+		return
+	}
+
+	parenthesize(ps, t.First)
+	ps.writeByte('?')
+	parenthesize(ps, t.Second)
+	ps.writeString(" : ")
+	parenthesize(ps, t.Third)
+}
+
+// Traverse calls fn on t and, if fn returns true, on t's children.
+func (t *Trinary) Traverse(fn func(AST) bool) {
+	if fn(t) {
+		t.Op.Traverse(fn)
+		t.First.Traverse(fn)
+		t.Second.Traverse(fn)
+		t.Third.Traverse(fn)
+	}
+}
+
+// Copy returns a copy of t with fn applied, or nil if nothing changed.
+// Subtrees for which skip returns true are not copied.
+func (t *Trinary) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+	if skip(t) {
+		return nil
+	}
+	op := t.Op.Copy(fn, skip)
+	first := t.First.Copy(fn, skip)
+	second := t.Second.Copy(fn, skip)
+	third := t.Third.Copy(fn, skip)
+	if op == nil && first == nil && second == nil && third == nil {
+		return fn(t)
+	}
+	if op == nil {
+		op = t.Op
+	}
+	if first == nil {
+		first = t.First
+	}
+	if second == nil {
+		second = t.Second
+	}
+	if third == nil {
+		third = t.Third
+	}
+	t = &Trinary{Op: op, First: first, Second: second, Third: third}
+	if r := fn(t); r != nil {
+		return r
+	}
+	return t
+}
+
+// GoString returns a verbose Go-syntax representation of t, for debugging.
+func (t *Trinary) GoString() string {
+	return t.goString(0, "")
+}
+
+// goString formats t at the given indentation, prefixed by field.
+func (t *Trinary) goString(indent int, field string) string {
+	return fmt.Sprintf("%*s%sTrinary:\n%s\n%s\n%s\n%s", indent, "", field,
+		t.Op.goString(indent+2, "Op: "),
+		t.First.goString(indent+2, "First: "),
+		t.Second.goString(indent+2, "Second: "),
+		t.Third.goString(indent+2, "Third: "))
+}
+
+// Fold is a C++17 fold-expression.  Arg2 is nil for a unary operator.
+type Fold struct {
+	Left bool // true for a left fold (... op pack)
+	Op   AST
+	Arg1 AST
+	Arg2 AST
+}
+
+// print writes the fold-expression to ps: "(...op arg)" or
+// "(arg op...)" for unary folds, "(arg1 op ... op arg2)" for binary.
+func (f *Fold) print(ps *printState) {
+	op, _ := f.Op.(*Operator)
+	printOp := func() {
+		if op != nil {
+			ps.writeString(op.Name)
+		} else {
+			ps.print(f.Op)
+		}
+	}
+
+	if f.Arg2 == nil {
+		if f.Left {
+			ps.writeString("(...")
+			printOp()
+			parenthesize(ps, f.Arg1)
+			ps.writeString(")")
+		} else {
+			ps.writeString("(")
+			parenthesize(ps, f.Arg1)
+			printOp()
+			ps.writeString("...)")
+		}
+	} else {
+		ps.writeString("(")
+		parenthesize(ps, f.Arg1)
+		printOp()
+		ps.writeString("...")
+		printOp()
+		parenthesize(ps, f.Arg2)
+		ps.writeString(")")
+	}
+}
+
+// Traverse calls fn on f and, if fn returns true, on f's children.
+func (f *Fold) Traverse(fn func(AST) bool) {
+	if fn(f) {
+		f.Op.Traverse(fn)
+		f.Arg1.Traverse(fn)
+		if f.Arg2 != nil {
+			f.Arg2.Traverse(fn)
+		}
+	}
+}
+
+// Copy returns a copy of f with fn applied, or nil if nothing changed.
+// Subtrees for which skip returns true are not copied.
+func (f *Fold) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+	if skip(f) {
+		return nil
+	}
+	op := f.Op.Copy(fn, skip)
+	arg1 := f.Arg1.Copy(fn, skip)
+	var arg2 AST
+	if f.Arg2 != nil {
+		arg2 = f.Arg2.Copy(fn, skip)
+	}
+	if op == nil && arg1 == nil && arg2 == nil {
+		return fn(f)
+	}
+	if op == nil {
+		op = f.Op
+	}
+	if arg1 == nil {
+		arg1 = f.Arg1
+	}
+	if arg2 == nil {
+		arg2 = f.Arg2
+	}
+	f = &Fold{Left: f.Left, Op: op, Arg1: arg1, Arg2: arg2}
+	if r := fn(f); r != nil {
+		return r
+	}
+	return f
+}
+
+// GoString returns a verbose Go-syntax representation of f, for debugging.
+func (f *Fold) GoString() string {
+	return f.goString(0, "")
+}
+
+// goString formats f at the given indentation, prefixed by field.
+func (f *Fold) goString(indent int, field string) string {
+	if f.Arg2 == nil {
+		return fmt.Sprintf("%*s%sFold: Left: %t\n%s\n%s", indent, "", field,
+			f.Left, f.Op.goString(indent+2, "Op: "),
+			f.Arg1.goString(indent+2, "Arg1: "))
+	} else {
+		return fmt.Sprintf("%*s%sFold: Left: %t\n%s\n%s\n%s", indent, "", field,
+			f.Left, f.Op.goString(indent+2, "Op: "),
+			f.Arg1.goString(indent+2, "Arg1: "),
+			f.Arg2.goString(indent+2, "Arg2: "))
+	}
+}
+
+// New is a use of operator new in an expression.
+// Place and Init may be nil.
+type New struct {
+	Op    AST
+	Place AST
+	Type  AST
+	Init  AST
+}
+
+// print writes the new-expression to ps.
+func (n *New) print(ps *printState) {
+	// Op doesn't really matter for printing--we always print "new".
+	ps.writeString("new ")
+	if n.Place != nil {
+		parenthesize(ps, n.Place)
+		ps.writeByte(' ')
+	}
+	ps.print(n.Type)
+	if n.Init != nil {
+		parenthesize(ps, n.Init)
+	}
+}
+
+// Traverse calls fn on n and, if fn returns true, on n's non-nil children.
+func (n *New) Traverse(fn func(AST) bool) {
+	if fn(n) {
+		n.Op.Traverse(fn)
+		if n.Place != nil {
+			n.Place.Traverse(fn)
+		}
+		n.Type.Traverse(fn)
+		if n.Init != nil {
+			n.Init.Traverse(fn)
+		}
+	}
+}
+
+// Copy returns a copy of n with fn applied, or nil if nothing changed.
+// Subtrees for which skip returns true are not copied.
+func (n *New) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+	if skip(n) {
+		return nil
+	}
+	op := n.Op.Copy(fn, skip)
+	var place AST
+	if n.Place != nil {
+		place = n.Place.Copy(fn, skip)
+	}
+	typ := n.Type.Copy(fn, skip)
+	var ini AST
+	if n.Init != nil {
+		ini = n.Init.Copy(fn, skip)
+	}
+	if op == nil && place == nil && typ == nil && ini == nil {
+		return fn(n)
+	}
+	if op == nil {
+		op = n.Op
+	}
+	if place == nil {
+		place = n.Place
+	}
+	if typ == nil {
+		typ = n.Type
+	}
+	if ini == nil {
+		ini = n.Init
+	}
+	n = &New{Op: op, Place: place, Type: typ, Init: ini}
+	if r := fn(n); r != nil {
+		return r
+	}
+	return n
+}
+
+// GoString returns a verbose Go-syntax representation of n, for debugging.
+func (n *New) GoString() string {
+	return n.goString(0, "")
+}
+
+// goString formats n at the given indentation, prefixed by field.
+func (n *New) goString(indent int, field string) string {
+	var place string
+	if n.Place == nil {
+		place = fmt.Sprintf("%*sPlace: nil", indent, "")
+	} else {
+		place = n.Place.goString(indent+2, "Place: ")
+	}
+	var ini string
+	if n.Init == nil {
+		ini = fmt.Sprintf("%*sInit: nil", indent, "")
+	} else {
+		ini = n.Init.goString(indent+2, "Init: ")
+	}
+	return fmt.Sprintf("%*s%sNew:\n%s\n%s\n%s\n%s", indent, "", field,
+		n.Op.goString(indent+2, "Op: "), place,
+		n.Type.goString(indent+2, "Type: "), ini)
+}
+
+// Literal is a literal in an expression.
+type Literal struct {
+	Type AST
+	Val  string
+	Neg  bool // true if the value is negated
+}
+
+// Suffixes to use for constants of the given integer type.
+var builtinTypeSuffix = map[string]string{
+	"int":                "",
+	"unsigned int":       "u",
+	"long":               "l",
+	"unsigned long":      "ul",
+	"long long":          "ll",
+	"unsigned long long": "ull",
+}
+
+// Builtin float types.
+var builtinTypeFloat = map[string]bool{
+	"double":      true,
+	"long double": true,
+	"float":       true,
+	"__float128":  true,
+	"half":        true,
+}
+
+// print writes the literal to ps.  Integer literals get a type
+// suffix, bool literals print as true/false, nullptr prints as its
+// type, and everything else prints as (type)value, with float values
+// wrapped in brackets.
+func (l *Literal) print(ps *printState) {
+	isFloat := false
+	if b, ok := l.Type.(*BuiltinType); ok {
+		if suffix, ok := builtinTypeSuffix[b.Name]; ok {
+			if l.Neg {
+				ps.writeByte('-')
+			}
+			ps.writeString(l.Val)
+			ps.writeString(suffix)
+			return
+		} else if b.Name == "bool" && !l.Neg {
+			switch l.Val {
+			case "0":
+				ps.writeString("false")
+				return
+			case "1":
+				ps.writeString("true")
+				return
+			}
+		} else if b.Name == "decltype(nullptr)" && l.Val == "" {
+			ps.print(l.Type)
+			return
+		} else {
+			isFloat = builtinTypeFloat[b.Name]
+		}
+	}
+
+	ps.writeByte('(')
+	ps.print(l.Type)
+	ps.writeByte(')')
+
+	if isFloat {
+		ps.writeByte('[')
+	}
+	if l.Neg {
+		ps.writeByte('-')
+	}
+	ps.writeString(l.Val)
+	if isFloat {
+		ps.writeByte(']')
+	}
+}
+
+// Traverse calls fn on l and, if fn returns true, on l's type.
+func (l *Literal) Traverse(fn func(AST) bool) {
+	if fn(l) {
+		l.Type.Traverse(fn)
+	}
+}
+
+// Copy returns a copy of l with fn applied, or nil if nothing changed.
+// Subtrees for which skip returns true are not copied.
+func (l *Literal) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+	if skip(l) {
+		return nil
+	}
+	typ := l.Type.Copy(fn, skip)
+	if typ == nil {
+		return fn(l)
+	}
+	l = &Literal{Type: typ, Val: l.Val, Neg: l.Neg}
+	if r := fn(l); r != nil {
+		return r
+	}
+	return l
+}
+
+// GoString returns a verbose Go-syntax representation of l, for debugging.
+func (l *Literal) GoString() string {
+	return l.goString(0, "")
+}
+
+// goString formats l at the given indentation, prefixed by field.
+func (l *Literal) goString(indent int, field string) string {
+	var neg string
+	if l.Neg {
+		neg = " Neg: true"
+	}
+	return fmt.Sprintf("%*s%sLiteral:%s\n%s\n%*sVal: %s", indent, "", field,
+		neg, l.Type.goString(indent+2, "Type: "),
+		indent+2, "", l.Val)
+}
+
+// ExprList is a list of expressions, typically arguments to a
+// function call in an expression.
+type ExprList struct {
+	Exprs []AST
+}
+
+// print writes the expressions to ps, comma-separated.
+func (el *ExprList) print(ps *printState) {
+	for i, e := range el.Exprs {
+		if i > 0 {
+			ps.writeString(", ")
+		}
+		ps.print(e)
+	}
+}
+
+// Traverse calls fn on el and, if fn returns true, on each expression.
+func (el *ExprList) Traverse(fn func(AST) bool) {
+	if fn(el) {
+		for _, e := range el.Exprs {
+			e.Traverse(fn)
+		}
+	}
+}
+
+// Copy returns a copy of el with fn applied, or nil if nothing changed.
+// Subtrees for which skip returns true are not copied.
+func (el *ExprList) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+	if skip(el) {
+		return nil
+	}
+	exprs := make([]AST, len(el.Exprs))
+	changed := false
+	for i, e := range el.Exprs {
+		ec := e.Copy(fn, skip)
+		if ec == nil {
+			// Copy returned nil: keep the original child.
+			exprs[i] = e
+		} else {
+			exprs[i] = ec
+			changed = true
+		}
+	}
+	if !changed {
+		return fn(el)
+	}
+	el = &ExprList{Exprs: exprs}
+	if r := fn(el); r != nil {
+		return r
+	}
+	return el
+}
+
+// GoString returns a verbose Go-syntax representation of el, for debugging.
+func (el *ExprList) GoString() string {
+	return el.goString(0, "")
+}
+
+// goString formats el at the given indentation, prefixed by field.
+func (el *ExprList) goString(indent int, field string) string {
+	if len(el.Exprs) == 0 {
+		return fmt.Sprintf("%*s%sExprList: nil", indent, "", field)
+	}
+	s := fmt.Sprintf("%*s%sExprList:", indent, "", field)
+	for i, e := range el.Exprs {
+		s += "\n"
+		s += e.goString(indent+2, fmt.Sprintf("%d: ", i))
+	}
+	return s
+}
+
+// InitializerList is an initializer list: an optional type with a
+// list of expressions.
+type InitializerList struct {
+	Type  AST // may be nil
+	Exprs AST
+}
+
+// print writes the initializer list to ps as [type]{exprs}.
+func (il *InitializerList) print(ps *printState) {
+	if il.Type != nil {
+		ps.print(il.Type)
+	}
+	ps.writeByte('{')
+	ps.print(il.Exprs)
+	ps.writeByte('}')
+}
+
+// Traverse calls fn on il and, if fn returns true, on il's children.
+func (il *InitializerList) Traverse(fn func(AST) bool) {
+	if fn(il) {
+		if il.Type != nil {
+			il.Type.Traverse(fn)
+		}
+		il.Exprs.Traverse(fn)
+	}
+}
+
+// Copy returns a copy of il with fn applied, or nil if nothing changed.
+// Subtrees for which skip returns true are not copied.
+func (il *InitializerList) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+	if skip(il) {
+		return nil
+	}
+	var typ AST
+	if il.Type != nil {
+		typ = il.Type.Copy(fn, skip)
+	}
+	exprs := il.Exprs.Copy(fn, skip)
+	if typ == nil && exprs == nil {
+		return fn(il)
+	}
+	if typ == nil {
+		typ = il.Type
+	}
+	if exprs == nil {
+		exprs = il.Exprs
+	}
+	il = &InitializerList{Type: typ, Exprs: exprs}
+	if r := fn(il); r != nil {
+		return r
+	}
+	return il
+}
+
+// GoString returns a verbose Go-syntax representation of il, for debugging.
+func (il *InitializerList) GoString() string {
+	return il.goString(0, "")
+}
+
+// goString formats il at the given indentation, prefixed by field.
+func (il *InitializerList) goString(indent int, field string) string {
+	var t string
+	if il.Type == nil {
+		t = fmt.Sprintf("%*sType: nil", indent+2, "")
+	} else {
+		t = il.Type.goString(indent+2, "Type: ")
+	}
+	return fmt.Sprintf("%*s%sInitializerList:\n%s\n%s", indent, "", field,
+		t, il.Exprs.goString(indent+2, "Exprs: "))
+}
+
+// DefaultArg holds a default argument for a local name.
+type DefaultArg struct {
+	Num int // zero-based argument index
+	Arg AST
+}
+
+// print writes the default argument to ps; Num is printed one-based.
+func (da *DefaultArg) print(ps *printState) {
+	fmt.Fprintf(&ps.buf, "{default arg#%d}::", da.Num+1)
+	ps.print(da.Arg)
+}
+
+// Traverse calls fn on da and, if fn returns true, on da's argument.
+func (da *DefaultArg) Traverse(fn func(AST) bool) {
+	if fn(da) {
+		da.Arg.Traverse(fn)
+	}
+}
+
+// Copy returns a copy of da with fn applied, or nil if nothing changed.
+// Subtrees for which skip returns true are not copied.
+func (da *DefaultArg) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+	if skip(da) {
+		return nil
+	}
+	arg := da.Arg.Copy(fn, skip)
+	if arg == nil {
+		return fn(da)
+	}
+	da = &DefaultArg{Num: da.Num, Arg: arg}
+	if r := fn(da); r != nil {
+		return r
+	}
+	return da
+}
+
+// GoString returns a verbose Go-syntax representation of da, for debugging.
+func (da *DefaultArg) GoString() string {
+	return da.goString(0, "")
+}
+
+// goString formats da at the given indentation, prefixed by field.
+func (da *DefaultArg) goString(indent int, field string) string {
+	return fmt.Sprintf("%*s%sDefaultArg: Num: %d\n%s", indent, "", field, da.Num,
+		da.Arg.goString(indent+2, "Arg: "))
+}
+
+// Closure is a closure, or lambda expression.
+type Closure struct {
+	Types []AST // parameter types
+	Num   int   // zero-based lambda index within the enclosing scope
+}
+
+// print writes the closure to ps as {lambda(types)#N}; Num is
+// printed one-based.
+func (cl *Closure) print(ps *printState) {
+	ps.writeString("{lambda(")
+	for i, t := range cl.Types {
+		if i > 0 {
+			ps.writeString(", ")
+		}
+		ps.print(t)
+	}
+	ps.writeString(fmt.Sprintf(")#%d}", cl.Num+1))
+}
+
+// Traverse calls fn on cl and, if fn returns true, on each type.
+func (cl *Closure) Traverse(fn func(AST) bool) {
+	if fn(cl) {
+		for _, t := range cl.Types {
+			t.Traverse(fn)
+		}
+	}
+}
+
+// Copy returns a copy of cl with fn applied, or nil if nothing changed.
+// Subtrees for which skip returns true are not copied.
+func (cl *Closure) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+	if skip(cl) {
+		return nil
+	}
+	types := make([]AST, len(cl.Types))
+	changed := false
+	for i, t := range cl.Types {
+		tc := t.Copy(fn, skip)
+		if tc == nil {
+			// Copy returned nil: keep the original child.
+			types[i] = t
+		} else {
+			types[i] = tc
+			changed = true
+		}
+	}
+	if !changed {
+		return fn(cl)
+	}
+	cl = &Closure{Types: types, Num: cl.Num}
+	if r := fn(cl); r != nil {
+		return r
+	}
+	return cl
+}
+
+// GoString returns a verbose Go-syntax representation of cl, for debugging.
+func (cl *Closure) GoString() string {
+	return cl.goString(0, "")
+}
+
+// goString formats cl at the given indentation, prefixed by field.
+func (cl *Closure) goString(indent int, field string) string {
+	var types string
+	if len(cl.Types) == 0 {
+		types = fmt.Sprintf("%*sTypes: nil", indent+2, "")
+	} else {
+		types = fmt.Sprintf("%*sTypes:", indent+2, "")
+		for i, t := range cl.Types {
+			types += "\n"
+			types += t.goString(indent+4, fmt.Sprintf("%d: ", i))
+		}
+	}
+	return fmt.Sprintf("%*s%sClosure: Num: %d\n%s", indent, "", field, cl.Num, types)
+}
+
+// UnnamedType is an unnamed type, that just has an index.
+type UnnamedType struct {
+	Num int // zero-based index; printed one-based
+}
+
+// print writes the unnamed type to ps as {unnamed type#N}.
+func (ut *UnnamedType) print(ps *printState) {
+	ps.writeString(fmt.Sprintf("{unnamed type#%d}", ut.Num+1))
+}
+
+// Traverse calls fn on ut; an unnamed type has no children.
+func (ut *UnnamedType) Traverse(fn func(AST) bool) {
+	fn(ut)
+}
+
+// Copy returns the result of fn applied to ut, or nil if skipped.
+func (ut *UnnamedType) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+	if skip(ut) {
+		return nil
+	}
+	return fn(ut)
+}
+
+// GoString returns a verbose Go-syntax representation of ut, for debugging.
+func (ut *UnnamedType) GoString() string {
+	return ut.goString(0, "")
+}
+
+// goString formats ut at the given indentation, prefixed by field.
+func (ut *UnnamedType) goString(indent int, field string) string {
+	return fmt.Sprintf("%*s%sUnnamedType: Num: %d", indent, "", field, ut.Num)
+}
+
+// Clone is a clone of a function, with a distinguishing suffix.
+type Clone struct {
+	Base   AST
+	Suffix string
+}
+
+// print writes the base followed by " [clone suffix]" to ps.
+func (c *Clone) print(ps *printState) {
+	ps.print(c.Base)
+	ps.writeString(fmt.Sprintf(" [clone %s]", c.Suffix))
+}
+
+// Traverse calls fn on c and, if fn returns true, on c's base.
+func (c *Clone) Traverse(fn func(AST) bool) {
+	if fn(c) {
+		c.Base.Traverse(fn)
+	}
+}
+
+// Copy returns a copy of c with fn applied, or nil if nothing changed.
+// Subtrees for which skip returns true are not copied.
+func (c *Clone) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+	if skip(c) {
+		return nil
+	}
+	base := c.Base.Copy(fn, skip)
+	if base == nil {
+		return fn(c)
+	}
+	c = &Clone{Base: base, Suffix: c.Suffix}
+	if r := fn(c); r != nil {
+		return r
+	}
+	return c
+}
+
+// GoString returns a verbose Go-syntax representation of c, for debugging.
+func (c *Clone) GoString() string {
+	return c.goString(0, "")
+}
+
+// goString formats c at the given indentation, prefixed by field.
+func (c *Clone) goString(indent int, field string) string {
+	return fmt.Sprintf("%*s%sClone: Suffix: %s\n%s", indent, "", field,
+		c.Suffix, c.Base.goString(indent+2, "Base: "))
+}
+
+// Special is a special symbol, printed as a prefix plus another
+// value.
+type Special struct {
+	Prefix string
+	Val    AST
+}
+
+// print writes the prefix followed by the value to ps.
+func (s *Special) print(ps *printState) {
+	ps.writeString(s.Prefix)
+	ps.print(s.Val)
+}
+
+// Traverse calls fn on s and, if fn returns true, on s's value.
+func (s *Special) Traverse(fn func(AST) bool) {
+	if fn(s) {
+		s.Val.Traverse(fn)
+	}
+}
+
+// Copy returns a copy of s with fn applied, or nil if nothing changed.
+// Subtrees for which skip returns true are not copied.
+func (s *Special) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+	if skip(s) {
+		return nil
+	}
+	val := s.Val.Copy(fn, skip)
+	if val == nil {
+		return fn(s)
+	}
+	s = &Special{Prefix: s.Prefix, Val: val}
+	if r := fn(s); r != nil {
+		return r
+	}
+	return s
+}
+
+// GoString returns a verbose Go-syntax representation of s, for debugging.
+func (s *Special) GoString() string {
+	return s.goString(0, "")
+}
+
+// goString formats s at the given indentation, prefixed by field.
+func (s *Special) goString(indent int, field string) string {
+	return fmt.Sprintf("%*s%sSpecial: Prefix: %s\n%s", indent, "", field,
+		s.Prefix, s.Val.goString(indent+2, "Val: "))
+}
+
+// Special2 is like Special, but uses two values separated by a
+// middle string.
+type Special2 struct {
+	Prefix string
+	Val1   AST
+	Middle string
+	Val2   AST
+}
+
+// print writes prefix, val1, middle, val2 to ps in order.
+func (s *Special2) print(ps *printState) {
+	ps.writeString(s.Prefix)
+	ps.print(s.Val1)
+	ps.writeString(s.Middle)
+	ps.print(s.Val2)
+}
+
+// Traverse calls fn on s and, if fn returns true, on both values.
+func (s *Special2) Traverse(fn func(AST) bool) {
+	if fn(s) {
+		s.Val1.Traverse(fn)
+		s.Val2.Traverse(fn)
+	}
+}
+
+// Copy returns a copy of s with fn applied, or nil if nothing changed.
+// Subtrees for which skip returns true are not copied.
+func (s *Special2) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+	if skip(s) {
+		return nil
+	}
+	val1 := s.Val1.Copy(fn, skip)
+	val2 := s.Val2.Copy(fn, skip)
+	if val1 == nil && val2 == nil {
+		return fn(s)
+	}
+	if val1 == nil {
+		val1 = s.Val1
+	}
+	if val2 == nil {
+		val2 = s.Val2
+	}
+	s = &Special2{Prefix: s.Prefix, Val1: val1, Middle: s.Middle, Val2: val2}
+	if r := fn(s); r != nil {
+		return r
+	}
+	return s
+}
+
+// GoString returns a verbose Go-syntax representation of s, for debugging.
+func (s *Special2) GoString() string {
+	return s.goString(0, "")
+}
+
+// goString formats s at the given indentation, prefixed by field.
+func (s *Special2) goString(indent int, field string) string {
+	return fmt.Sprintf("%*s%sSpecial2: Prefix: %s\n%s\n%*sMiddle: %s\n%s", indent, "", field,
+		s.Prefix, s.Val1.goString(indent+2, "Val1: "),
+		indent+2, "", s.Middle, s.Val2.goString(indent+2, "Val2: "))
+}
+
+// EnableIf is used by clang for an enable_if attribute.
+type EnableIf struct {
+	Type AST
+	Args []AST
+}
+
+// print writes the type followed by " [enable_if:args]" to ps.
+func (ei *EnableIf) print(ps *printState) {
+	ps.print(ei.Type)
+	ps.writeString(" [enable_if:")
+	first := true
+	for _, a := range ei.Args {
+		if !first {
+			ps.writeString(", ")
+		}
+		ps.print(a)
+		first = false
+	}
+	ps.writeString("]")
+}
+
+// Traverse calls fn on ei and, if fn returns true, on ei's children.
+func (ei *EnableIf) Traverse(fn func(AST) bool) {
+	if fn(ei) {
+		ei.Type.Traverse(fn)
+		for _, a := range ei.Args {
+			a.Traverse(fn)
+		}
+	}
+}
+
+// Copy returns a copy of ei with fn applied, or nil if nothing changed.
+// Subtrees for which skip returns true are not copied.
+func (ei *EnableIf) Copy(fn func(AST) AST, skip func(AST) bool) AST {
+	if skip(ei) {
+		return nil
+	}
+	typ := ei.Type.Copy(fn, skip)
+	argsChanged := false
+	args := make([]AST, len(ei.Args))
+	for i, a := range ei.Args {
+		ac := a.Copy(fn, skip)
+		if ac == nil {
+			// Copy returned nil: keep the original child.
+			args[i] = a
+		} else {
+			args[i] = ac
+			argsChanged = true
+		}
+	}
+	if typ == nil && !argsChanged {
+		return fn(ei)
+	}
+	if typ == nil {
+		typ = ei.Type
+	}
+	ei = &EnableIf{Type: typ, Args: args}
+	if r := fn(ei); r != nil {
+		return r
+	}
+	return ei
+}
+
+// GoString returns a verbose Go-syntax representation of ei, for debugging.
+func (ei *EnableIf) GoString() string {
+	return ei.goString(0, "")
+}
+
+// goString formats ei at the given indentation, prefixed by field.
+func (ei *EnableIf) goString(indent int, field string) string {
+	var args string
+	if len(ei.Args) == 0 {
+		args = fmt.Sprintf("%*sArgs: nil", indent+2, "")
+	} else {
+		args = fmt.Sprintf("%*sArgs:", indent+2, "")
+		for i, a := range ei.Args {
+			args += "\n"
+			args += a.goString(indent+4, fmt.Sprintf("%d: ", i))
+		}
+	}
+	return fmt.Sprintf("%*s%sEnableIf:\n%s\n%s", indent, "", field,
+		ei.Type.goString(indent+2, "Type: "), args)
+}
+
+// printInner prints the pending inner types.  If prefixOnly is true,
+// method qualifiers are not printed but are returned so the caller
+// can emit them after a suffix.
+func (ps *printState) printInner(prefixOnly bool) []AST {
+	var save []AST
+	var psave *[]AST
+	if prefixOnly {
+		psave = &save
+	}
+	for len(ps.inner) > 0 {
+		ps.printOneInner(psave)
+	}
+	return save
+}
+
+// innerPrinter is an interface for types that can print themselves as
+// inner types.
+type innerPrinter interface {
+	printInner(*printState)
+}
+
+// printOneInner prints the most recent inner type, popping it from
+// the stack.  If save is not nil, only print prefixes; method
+// qualifiers are appended to *save instead of being printed.
+func (ps *printState) printOneInner(save *[]AST) {
+	if len(ps.inner) == 0 {
+		panic("printOneInner called with no inner types")
+	}
+	// Pop the most recently pushed inner type.
+	ln := len(ps.inner)
+	a := ps.inner[ln-1]
+	ps.inner = ps.inner[:ln-1]
+
+	if save != nil {
+		if _, ok := a.(*MethodWithQualifiers); ok {
+			*save = append(*save, a)
+			return
+		}
+	}
+
+	if ip, ok := a.(innerPrinter); ok {
+		ip.printInner(ps)
+	} else {
+		ps.print(a)
+	}
+}
+
+// isEmpty returns whether printing a will not print anything:
+// an empty argument pack or expression list, or a pack expansion
+// whose base is itself empty.
+func (ps *printState) isEmpty(a AST) bool {
+	switch a := a.(type) {
+	case *ArgumentPack:
+		return len(a.Args) == 0
+	case *ExprList:
+		return len(a.Exprs) == 0
+	case *PackExpansion:
+		return a.Pack != nil && ps.isEmpty(a.Base)
+	default:
+		return false
+	}
+}
diff --git a/src/cmd/vendor/github.com/ianlancetaylor/demangle/demangle.go b/src/cmd/vendor/github.com/ianlancetaylor/demangle/demangle.go
new file mode 100644
index 0000000..c266744
--- /dev/null
+++ b/src/cmd/vendor/github.com/ianlancetaylor/demangle/demangle.go
@@ -0,0 +1,2837 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package demangle defines functions that demangle GCC/LLVM C++ symbol names.
+// This package recognizes names that were mangled according to the C++ ABI
+// defined at http://codesourcery.com/cxx-abi/.
+//
+// Most programs will want to call Filter or ToString.
+package demangle
+
+import (
+ "errors"
+ "fmt"
+ "strings"
+)
+
+// ErrNotMangledName is returned by CheckedDemangle if the string does
+// not appear to be a C++ symbol name.
+var ErrNotMangledName = errors.New("not a C++ mangled name")
+
+// Option is the type of demangler options.
+type Option int
+
+const (
+	// The NoParams option disables demangling of function parameters.
+	NoParams Option = iota
+
+	// The NoTemplateParams option disables demangling of template parameters.
+	NoTemplateParams
+
+	// The NoClones option disables inclusion of clone suffixes.
+	// NoParams implies NoClones.
+	NoClones
+
+	// The Verbose option turns on more verbose demangling.
+	Verbose
+)
+
+// Filter demangles a C++ symbol name, returning the human-readable C++ name.
+// If any error occurs during demangling, the input string is returned
+// unchanged.
+func Filter(name string, options ...Option) string {
+	ret, err := ToString(name, options...)
+	if err != nil {
+		return name
+	}
+	return ret
+}
+
+// ToString demangles a C++ symbol name, returning a human-readable C++
+// name or an error.
+// If the name does not appear to be a C++ symbol name at all, the
+// error will be ErrNotMangledName.
+func ToString(name string, options ...Option) (string, error) {
+	a, err := ToAST(name, options...)
+	if err != nil {
+		return "", err
+	}
+	return ASTToString(a, options...), nil
+}
+
+// ToAST demangles a C++ symbol name into an abstract syntax tree
+// representing the symbol.
+// If the NoParams option is passed, and the name has a function type,
+// the parameter types are not demangled.
+// If the name does not appear to be a C++ symbol name at all, the
+// error will be ErrNotMangledName.
+func ToAST(name string, options ...Option) (AST, error) {
+	if strings.HasPrefix(name, "_Z") {
+		a, err := doDemangle(name[2:], options...)
+		// Offsets in any error are relative to the string after
+		// the "_Z" prefix; shift them back by 2.
+		return a, adjustErr(err, 2)
+	}
+
+	const prefix = "_GLOBAL_"
+	if strings.HasPrefix(name, prefix) {
+		// The standard demangler ignores NoParams for global
+		// constructors.  We are compatible.
+		// Remove every NoParams entry from options in place.
+		i := 0
+		for i < len(options) {
+			if options[i] == NoParams {
+				options = append(options[:i], options[i+1:]...)
+			} else {
+				i++
+			}
+		}
+		a, err := globalCDtorName(name[len(prefix):], options...)
+		return a, adjustErr(err, len(prefix))
+	}
+
+	return nil, ErrNotMangledName
+}
+
+// globalCDtorName demangles a global constructor/destructor symbol name.
+// The parameter is the string following the "_GLOBAL_" prefix, expected
+// to look like "<.|_|$><I|D>_<key>", where I marks a constructor and D
+// a destructor.
+func globalCDtorName(name string, options ...Option) (AST, error) {
+	if len(name) < 4 {
+		return nil, ErrNotMangledName
+	}
+	switch name[0] {
+	case '.', '_', '$':
+	default:
+		return nil, ErrNotMangledName
+	}
+
+	var ctor bool
+	switch name[1] {
+	case 'I':
+		ctor = true
+	case 'D':
+		ctor = false
+	default:
+		return nil, ErrNotMangledName
+	}
+
+	if name[2] != '_' {
+		return nil, ErrNotMangledName
+	}
+
+	// If the key is itself a mangled name, demangle it; otherwise
+	// use it verbatim.
+	if !strings.HasPrefix(name[3:], "_Z") {
+		return &GlobalCDtor{Ctor: ctor, Key: &Name{Name: name}}, nil
+	} else {
+		a, err := doDemangle(name[5:], options...)
+		if err != nil {
+			// Offsets are relative to name[5:]; shift back by 5.
+			return nil, adjustErr(err, 5)
+		}
+		return &GlobalCDtor{Ctor: ctor, Key: a}, nil
+	}
+}
+
+// The doDemangle function is the entry point into the demangler proper.
+// name is the mangled string with any "_Z" prefix already stripped.
+func doDemangle(name string, options ...Option) (ret AST, err error) {
+	// When the demangling routines encounter an error, they panic
+	// with a value of type demangleErr.
+	defer func() {
+		if r := recover(); r != nil {
+			if de, ok := r.(demangleErr); ok {
+				ret = nil
+				err = de
+				return
+			}
+			panic(r)
+		}
+	}()
+
+	params := true
+	clones := true
+	verbose := false
+	for _, o := range options {
+		switch o {
+		case NoParams:
+			params = false
+			clones = false
+		case NoTemplateParams:
+			// This is a valid option but only affects printing of the AST.
+		case NoClones:
+			clones = false
+		case Verbose:
+			verbose = true
+		default:
+			return nil, fmt.Errorf("unrecognized demangler option %v", o)
+		}
+	}
+
+	st := &state{str: name, verbose: verbose}
+	a := st.encoding(params, notForLocalName)
+
+	// Accept a clone suffix.
+	if clones {
+		for len(st.str) > 1 && st.str[0] == '.' && (isLower(st.str[1]) || st.str[1] == '_' || isDigit(st.str[1])) {
+			a = st.cloneSuffix(a)
+		}
+	}
+
+	// Trailing input is only an error when clone suffixes are being
+	// consumed; with NoParams/NoClones unparsed parameters remain.
+	if clones && len(st.str) > 0 {
+		st.fail("unparsed characters at end of mangled name")
+	}
+
+	return a, nil
+}
+
+// A state holds the current state of demangling a string.
+type state struct {
+	str       string        // remainder of string to demangle
+	verbose   bool          // whether to use verbose demangling
+	off       int           // offset of str within original string
+	subs      substitutions // substitutions
+	templates []*Template   // templates being processed
+	inLambda  int           // number of lambdas being parsed
+}
+
+// copy returns a copy of the current state.
+func (st *state) copy() *state {
+	n := new(state)
+	*n = *st
+	return n
+}
+
+// fail panics with demangleErr, to be caught in doDemangle.
+func (st *state) fail(err string) {
+	panic(demangleErr{err: err, off: st.off})
+}
+
+// failEarlier is like fail, but decrements the offset to indicate
+// that the point of failure occurred earlier in the string.
+func (st *state) failEarlier(err string, dec int) {
+	if st.off < dec {
+		panic("internal error")
+	}
+	panic(demangleErr{err: err, off: st.off - dec})
+}
+
+// advance advances the current string offset.
+func (st *state) advance(add int) {
+	if len(st.str) < add {
+		panic("internal error")
+	}
+	st.str = st.str[add:]
+	st.off += add
+}
+
+// checkChar requires that the next character in the string be c, and
+// advances past it.
+func (st *state) checkChar(c byte) {
+	if len(st.str) == 0 || st.str[0] != c {
+		panic("internal error")
+	}
+	st.advance(1)
+}
+
+// A demangleErr is an error at a specific offset in the mangled
+// string.
+type demangleErr struct {
+	err string
+	off int
+}
+
+// Error implements the builtin error interface for demangleErr.
+func (de demangleErr) Error() string {
+	return fmt.Sprintf("%s at %d", de.err, de.off)
+}
+
+// adjustErr adjusts the position of err, if it is a demangleErr,
+// and returns err.  Other errors pass through unchanged.
+func adjustErr(err error, adj int) error {
+	if err == nil {
+		return nil
+	}
+	if de, ok := err.(demangleErr); ok {
+		de.off += adj
+		return de
+	}
+	return err
+}
+
+// forLocalNameType distinguishes whether an encoding is being parsed
+// as part of a local name (which discards the return type) or not.
+type forLocalNameType int
+
+const (
+	forLocalName forLocalNameType = iota
+	notForLocalName
+)
+
+// encoding ::= <(function) name> <bare-function-type>
+//              <(data) name>
+//              <special-name>
+func (st *state) encoding(params bool, local forLocalNameType) AST {
+	if len(st.str) < 1 {
+		st.fail("expected encoding")
+	}
+
+	// Special names (vtables, thunks, guard variables, ...) start
+	// with G or T.
+	if st.str[0] == 'G' || st.str[0] == 'T' {
+		return st.specialName()
+	}
+
+	a := st.name()
+	a = simplify(a)
+
+	if !params {
+		// Don't demangle the parameters.
+
+		// Strip CV-qualifiers, as they apply to the 'this'
+		// parameter, and are not output by the standard
+		// demangler without parameters.
+		if mwq, ok := a.(*MethodWithQualifiers); ok {
+			a = mwq.Method
+		}
+
+		// If this is a local name, there may be CV-qualifiers
+		// on the name that really apply to the top level, and
+		// therefore must be discarded when discarding
+		// parameters.  This can happen when parsing a class
+		// that is local to a function.
+		if q, ok := a.(*Qualified); ok && q.LocalName {
+			p := &q.Name
+			if da, ok := (*p).(*DefaultArg); ok {
+				p = &da.Arg
+			}
+			if mwq, ok := (*p).(*MethodWithQualifiers); ok {
+				*p = mwq.Method
+			}
+		}
+
+		return a
+	}
+
+	if len(st.str) == 0 || st.str[0] == 'E' {
+		// There are no parameters--this is a data symbol, not
+		// a function symbol.
+		return a
+	}
+
+	check := a
+	mwq, _ := check.(*MethodWithQualifiers)
+	if mwq != nil {
+		check = mwq.Method
+	}
+
+	// If the name is a template, its parameters must be in scope
+	// while parsing the function type, so that T_ references
+	// resolve.
+	var template *Template
+	switch check := check.(type) {
+	case *Template:
+		template = check
+	case *Qualified:
+		if check.LocalName {
+			n := check.Name
+			if nmwq, ok := n.(*MethodWithQualifiers); ok {
+				n = nmwq.Method
+			}
+			template, _ = n.(*Template)
+		}
+	}
+	var oldInLambda int
+	if template != nil {
+		st.templates = append(st.templates, template)
+		oldInLambda = st.inLambda
+		st.inLambda = 0
+	}
+
+	// Checking for the enable_if attribute here is what the LLVM
+	// demangler does.  This is not very general but perhaps it is
+	// sufficient.
+	const enableIfPrefix = "Ua9enable_ifI"
+	var enableIfArgs []AST
+	if strings.HasPrefix(st.str, enableIfPrefix) {
+		// Advance to just before the 'I' so templateArgs sees it.
+		st.advance(len(enableIfPrefix) - 1)
+		enableIfArgs = st.templateArgs()
+	}
+
+	ft := st.bareFunctionType(hasReturnType(a))
+
+	if template != nil {
+		st.templates = st.templates[:len(st.templates)-1]
+		st.inLambda = oldInLambda
+	}
+
+	ft = simplify(ft)
+
+	// For a local name, discard the return type, so that it
+	// doesn't get confused with the top level return type.
+	if local == forLocalName {
+		if functype, ok := ft.(*FunctionType); ok {
+			functype.Return = nil
+		}
+	}
+
+	// Any top-level qualifiers belong to the function type.
+	if mwq != nil {
+		a = mwq.Method
+		mwq.Method = ft
+		ft = mwq
+	}
+	if q, ok := a.(*Qualified); ok && q.LocalName {
+		p := &q.Name
+		if da, ok := (*p).(*DefaultArg); ok {
+			p = &da.Arg
+		}
+		if mwq, ok := (*p).(*MethodWithQualifiers); ok {
+			*p = mwq.Method
+			mwq.Method = ft
+			ft = mwq
+		}
+	}
+
+	r := AST(&Typed{Name: a, Type: ft})
+
+	if len(enableIfArgs) > 0 {
+		r = &EnableIf{Type: r, Args: enableIfArgs}
+	}
+
+	return r
+}
+
+// hasReturnType reports whether the mangled form of a will have a
+// return type.
+func hasReturnType(a AST) bool {
+	switch a := a.(type) {
+	case *Qualified:
+		// A local name carries a return type only if the
+		// underlying entity does.
+		if a.LocalName {
+			return hasReturnType(a.Name)
+		}
+		return false
+	case *Template:
+		// Template functions encode a return type, except for
+		// constructors, destructors, and conversion operators.
+		return !isCDtorConversion(a.Name)
+	case *TypeWithQualifiers:
+		return hasReturnType(a.Base)
+	case *MethodWithQualifiers:
+		return hasReturnType(a.Method)
+	default:
+		return false
+	}
+}
+
+// isCDtorConversion reports whether an AST is a constructor, a
+// destructor, or a conversion operator. These are the function kinds
+// whose mangled form omits the return type.
+func isCDtorConversion(a AST) bool {
+	switch a := a.(type) {
+	case *Qualified:
+		// Look through qualification to the underlying name.
+		return isCDtorConversion(a.Name)
+	case *Constructor, *Destructor, *Cast:
+		return true
+	default:
+		return false
+	}
+}
+
+// <tagged-name> ::= <name> B <source-name>
+//
+// taggedName consumes zero or more ABI tags (B <source-name>
+// suffixes) following a, wrapping the result in TaggedName nodes.
+func (st *state) taggedName(a AST) AST {
+	for len(st.str) > 0 && st.str[0] == 'B' {
+		st.advance(1)
+		tag := st.sourceName()
+		a = &TaggedName{Name: a, Tag: tag}
+	}
+	return a
+}
+
+// <name> ::= <nested-name>
+//        ::= <unscoped-name>
+//        ::= <unscoped-template-name> <template-args>
+//        ::= <local-name>
+//
+// <unscoped-name> ::= <unqualified-name>
+//                 ::= St <unqualified-name>
+//
+// <unscoped-template-name> ::= <unscoped-name>
+//                          ::= <substitution>
+//
+// name parses a <name> production, dispatching on the first
+// character of the remaining input.
+func (st *state) name() AST {
+	if len(st.str) < 1 {
+		st.fail("expected name")
+	}
+	switch st.str[0] {
+	case 'N':
+		return st.nestedName()
+	case 'Z':
+		return st.localName()
+	case 'U':
+		a, isCast := st.unqualifiedName()
+		if isCast {
+			st.setTemplate(a, nil)
+		}
+		return a
+	case 'S':
+		if len(st.str) < 2 {
+			st.advance(1)
+			st.fail("expected substitution index")
+		}
+		var a AST
+		isCast := false
+		subst := false
+		if st.str[1] == 't' {
+			// St is shorthand for the std namespace.
+			st.advance(2)
+			a, isCast = st.unqualifiedName()
+			a = &Qualified{Scope: &Name{Name: "std"}, Name: a, LocalName: false}
+		} else {
+			a = st.substitution(false)
+			subst = true
+		}
+		if len(st.str) > 0 && st.str[0] == 'I' {
+			// This can only happen if we saw
+			// <unscoped-template-name> and are about to see
+			// <template-args>. <unscoped-template-name> is a
+			// substitution candidate if it did not come from a
+			// substitution.
+			if !subst {
+				st.subs.add(a)
+			}
+			args := st.templateArgs()
+			tmpl := &Template{Name: a, Args: args}
+			if isCast {
+				st.setTemplate(a, tmpl)
+				st.clearTemplateArgs(args)
+				isCast = false
+			}
+			a = tmpl
+		}
+		if isCast {
+			st.setTemplate(a, nil)
+		}
+		return a
+
+	default:
+		// A plain <unqualified-name>, possibly followed by
+		// template arguments.
+		a, isCast := st.unqualifiedName()
+		if len(st.str) > 0 && st.str[0] == 'I' {
+			st.subs.add(a)
+			args := st.templateArgs()
+			tmpl := &Template{Name: a, Args: args}
+			if isCast {
+				st.setTemplate(a, tmpl)
+				st.clearTemplateArgs(args)
+				isCast = false
+			}
+			a = tmpl
+		}
+		if isCast {
+			st.setTemplate(a, nil)
+		}
+		return a
+	}
+}
+
+// <nested-name> ::= N [<CV-qualifiers>] [<ref-qualifier>] <prefix> <unqualified-name> E
+//               ::= N [<CV-qualifiers>] [<ref-qualifier>] <template-prefix> <template-args> E
+//
+// nestedName parses a nested (scoped) name. Any CV- or
+// ref-qualifiers apply to the method, not the type, so they wrap the
+// result in a MethodWithQualifiers.
+func (st *state) nestedName() AST {
+	st.checkChar('N')
+	q := st.cvQualifiers()
+	r := st.refQualifier()
+	a := st.prefix()
+	if q != nil || r != "" {
+		a = &MethodWithQualifiers{Method: a, Qualifiers: q, RefQualifier: r}
+	}
+	if len(st.str) == 0 || st.str[0] != 'E' {
+		st.fail("expected E after nested name")
+	}
+	st.advance(1)
+	return a
+}
+
+// <prefix> ::= <prefix> <unqualified-name>
+//          ::= <template-prefix> <template-args>
+//          ::= <template-param>
+//          ::= <decltype>
+//          ::=
+//          ::= <substitution>
+//
+// <template-prefix> ::= <prefix> <(template) unqualified-name>
+//                   ::= <template-param>
+//                   ::= <substitution>
+//
+// <decltype> ::= Dt <expression> E
+//            ::= DT <expression> E
+//
+// prefix parses the component list of a nested name, accumulating
+// qualified-name components until the terminating 'E'. Each partial
+// prefix (except substitutions and the final component) is added to
+// the substitution table.
+func (st *state) prefix() AST {
+	var a AST
+
+	// The last name seen, for a constructor/destructor.
+	var last AST
+
+	// getLast unwraps templates, qualifications, and ABI tags to
+	// find the base name a ctor/dtor refers to.
+	getLast := func(a AST) AST {
+		for {
+			if t, ok := a.(*Template); ok {
+				a = t.Name
+			} else if q, ok := a.(*Qualified); ok {
+				a = q.Name
+			} else if t, ok := a.(*TaggedName); ok {
+				a = t.Name
+			} else {
+				return a
+			}
+		}
+	}
+
+	isCast := false
+	for {
+		if len(st.str) == 0 {
+			st.fail("expected prefix")
+		}
+		var next AST
+
+		c := st.str[0]
+		if isDigit(c) || isLower(c) || c == 'U' || c == 'L' {
+			un, isUnCast := st.unqualifiedName()
+			next = un
+			if isUnCast {
+				isCast = true
+			}
+		} else {
+			switch st.str[0] {
+			case 'C':
+				// Constructor; CI is an inheriting
+				// constructor.
+				inheriting := false
+				st.advance(1)
+				if len(st.str) > 0 && st.str[0] == 'I' {
+					inheriting = true
+					st.advance(1)
+				}
+				if len(st.str) < 1 {
+					st.fail("expected constructor type")
+				}
+				if last == nil {
+					st.fail("constructor before name is seen")
+				}
+				st.advance(1)
+				if inheriting {
+					last = st.demangleType(false)
+				}
+				next = &Constructor{Name: getLast(last)}
+			case 'D':
+				// Either a decltype (DT/Dt) or a destructor.
+				if len(st.str) > 1 && (st.str[1] == 'T' || st.str[1] == 't') {
+					next = st.demangleType(false)
+				} else {
+					if len(st.str) < 2 {
+						st.fail("expected destructor type")
+					}
+					if last == nil {
+						st.fail("destructor before name is seen")
+					}
+					st.advance(2)
+					next = &Destructor{Name: getLast(last)}
+				}
+			case 'S':
+				next = st.substitution(true)
+			case 'I':
+				if a == nil {
+					st.fail("unexpected template arguments")
+				}
+				var args []AST
+				args = st.templateArgs()
+				tmpl := &Template{Name: a, Args: args}
+				if isCast {
+					st.setTemplate(a, tmpl)
+					st.clearTemplateArgs(args)
+					isCast = false
+				}
+				a = nil
+				next = tmpl
+			case 'T':
+				next = st.templateParam()
+			case 'E':
+				if a == nil {
+					st.fail("expected prefix")
+				}
+				if isCast {
+					st.setTemplate(a, nil)
+				}
+				return a
+			case 'M':
+				if a == nil {
+					st.fail("unexpected lambda initializer")
+				}
+				// This is the initializer scope for a
+				// lambda. We don't need to record
+				// it. The normal code will treat the
+				// variable as a type scope, which
+				// gives appropriate output.
+				st.advance(1)
+				continue
+			case 'J':
+				// It appears that in some cases clang
+				// can emit a J for a template arg
+				// without the expected I. I don't
+				// know when this happens, but I've
+				// seen it in some large C++ programs.
+				if a == nil {
+					st.fail("unexpected template arguments")
+				}
+				var args []AST
+				for len(st.str) == 0 || st.str[0] != 'E' {
+					arg := st.templateArg()
+					args = append(args, arg)
+				}
+				st.advance(1)
+				tmpl := &Template{Name: a, Args: args}
+				if isCast {
+					st.setTemplate(a, tmpl)
+					st.clearTemplateArgs(args)
+					isCast = false
+				}
+				a = nil
+				next = tmpl
+			default:
+				st.fail("unrecognized letter in prefix")
+			}
+		}
+		last = next
+		if a == nil {
+			a = next
+		} else {
+			a = &Qualified{Scope: a, Name: next, LocalName: false}
+		}
+
+		// A substitution is not itself a candidate, and the
+		// final component (followed by 'E') is handled by the
+		// caller.
+		if c != 'S' && (len(st.str) == 0 || st.str[0] != 'E') {
+			st.subs.add(a)
+		}
+	}
+}
+
+// <unqualified-name> ::= <operator-name>
+//                    ::= <ctor-dtor-name>
+//                    ::= <source-name>
+//                    ::= <local-source-name>
+//
+//  <local-source-name>	::= L <source-name> <discriminator>
+//
+// unqualifiedName parses an unqualified name. The second result
+// reports whether the name is a conversion operator (a *Cast), which
+// callers need in order to fix up template references in the cast's
+// target type.
+func (st *state) unqualifiedName() (r AST, isCast bool) {
+	if len(st.str) < 1 {
+		st.fail("expected unqualified name")
+	}
+	var a AST
+	isCast = false
+	c := st.str[0]
+	if isDigit(c) {
+		a = st.sourceName()
+	} else if isLower(c) {
+		a, _ = st.operatorName(false)
+		if _, ok := a.(*Cast); ok {
+			isCast = true
+		}
+		// A user-defined literal operator is followed by its
+		// suffix name.
+		if op, ok := a.(*Operator); ok && op.Name == `operator"" ` {
+			n := st.sourceName()
+			a = &Unary{Op: op, Expr: n, Suffix: false, SizeofType: false}
+		}
+	} else {
+		switch c {
+		case 'C', 'D':
+			st.fail("constructor/destructor not in nested name")
+		case 'L':
+			st.advance(1)
+			a = st.sourceName()
+			a = st.discriminator(a)
+		case 'U':
+			// Ul is a lambda (closure type); Ut is an
+			// unnamed class/enum type.
+			if len(st.str) < 2 {
+				st.advance(1)
+				st.fail("expected closure or unnamed type")
+			}
+			c := st.str[1]
+			switch c {
+			case 'l':
+				a = st.closureTypeName()
+			case 't':
+				a = st.unnamedTypeName()
+			default:
+				st.advance(1)
+				st.fail("expected closure or unnamed type")
+			}
+		default:
+			st.fail("expected unqualified name")
+		}
+	}
+
+	// Any name may carry ABI tags.
+	if len(st.str) > 0 && st.str[0] == 'B' {
+		a = st.taggedName(a)
+	}
+
+	return a, isCast
+}
+
+// <source-name> ::= <(positive length) number> <identifier>
+// identifier ::= <(unqualified source code identifier)>
+//
+// sourceName parses a length-prefixed identifier.
+func (st *state) sourceName() AST {
+	val := st.number()
+	if val <= 0 {
+		st.fail("expected positive number")
+	}
+	if len(st.str) < val {
+		st.fail("not enough characters for identifier")
+	}
+	id := st.str[:val]
+	st.advance(val)
+
+	// Look for GCC encoding of anonymous namespace, and make it
+	// more friendly.
+	const anonPrefix = "_GLOBAL_"
+	if strings.HasPrefix(id, anonPrefix) && len(id) > len(anonPrefix)+2 {
+		c1 := id[len(anonPrefix)]
+		c2 := id[len(anonPrefix)+1]
+		if (c1 == '.' || c1 == '_' || c1 == '$') && c2 == 'N' {
+			id = "(anonymous namespace)"
+		}
+	}
+
+	n := &Name{Name: id}
+	return n
+}
+
+// number ::= [n] <(non-negative decimal integer)>
+//
+// number parses a decimal integer; a leading 'n' negates it. It
+// rejects values large enough to risk 32-bit overflow, since any
+// such number in a mangled name is bogus.
+func (st *state) number() int {
+	neg := false
+	if len(st.str) > 0 && st.str[0] == 'n' {
+		neg = true
+		st.advance(1)
+	}
+	if len(st.str) == 0 || !isDigit(st.str[0]) {
+		st.fail("missing number")
+	}
+	val := 0
+	for len(st.str) > 0 && isDigit(st.str[0]) {
+		// Number picked to ensure we can't overflow with 32-bit int.
+		// Any very large number here is bogus.
+		if val >= 0x80000000/10-10 {
+			st.fail("numeric overflow")
+		}
+		val = val*10 + int(st.str[0]-'0')
+		st.advance(1)
+	}
+	if neg {
+		val = -val
+	}
+	return val
+}
+
+// An operator is the demangled name, and the number of arguments it
+// takes in an expression.
+type operator struct {
+	name string // demangled spelling, e.g. "+=" or "sizeof "
+	args int    // operand count when used in an <expression>
+}
+
+// The operators map maps the mangled operator names (the two-letter
+// codes from the Itanium ABI <operator-name> production) to
+// information about them.
+var operators = map[string]operator{
+	"aN": {"&=", 2},
+	"aS": {"=", 2},
+	"aa": {"&&", 2},
+	"ad": {"&", 1},
+	"an": {"&", 2},
+	"at": {"alignof ", 1},
+	"aw": {"co_await ", 1},
+	"az": {"alignof ", 1},
+	"cc": {"const_cast", 2},
+	"cl": {"()", 2},
+	// cp is not in the ABI but is used by clang "when the call
+	// would use ADL except for being parenthesized."
+	"cp": {"()", 2},
+	"cm": {",", 2},
+	"co": {"~", 1},
+	"dV": {"/=", 2},
+	"dX": {"[...]=", 3},
+	"da": {"delete[] ", 1},
+	"dc": {"dynamic_cast", 2},
+	"de": {"*", 1},
+	"di": {"=", 2},
+	"dl": {"delete ", 1},
+	"ds": {".*", 2},
+	"dt": {".", 2},
+	"dv": {"/", 2},
+	"dx": {"]=", 2},
+	"eO": {"^=", 2},
+	"eo": {"^", 2},
+	"eq": {"==", 2},
+	"fl": {"...", 2},
+	"fr": {"...", 2},
+	"fL": {"...", 3},
+	"fR": {"...", 3},
+	"ge": {">=", 2},
+	"gs": {"::", 1},
+	"gt": {">", 2},
+	"ix": {"[]", 2},
+	"lS": {"<<=", 2},
+	"le": {"<=", 2},
+	"li": {`operator"" `, 1},
+	"ls": {"<<", 2},
+	"lt": {"<", 2},
+	"mI": {"-=", 2},
+	"mL": {"*=", 2},
+	"mi": {"-", 2},
+	"ml": {"*", 2},
+	"mm": {"--", 1},
+	"na": {"new[]", 3},
+	"ne": {"!=", 2},
+	"ng": {"-", 1},
+	"nt": {"!", 1},
+	"nw": {"new", 3},
+	"oR": {"|=", 2},
+	"oo": {"||", 2},
+	"or": {"|", 2},
+	"pL": {"+=", 2},
+	"pl": {"+", 2},
+	"pm": {"->*", 2},
+	"pp": {"++", 1},
+	"ps": {"+", 1},
+	"pt": {"->", 2},
+	"qu": {"?", 3},
+	"rM": {"%=", 2},
+	"rS": {">>=", 2},
+	"rc": {"reinterpret_cast", 2},
+	"rm": {"%", 2},
+	"rs": {">>", 2},
+	"sP": {"sizeof...", 1},
+	"sZ": {"sizeof...", 1},
+	"sc": {"static_cast", 2},
+	"ss": {"<=>", 2},
+	"st": {"sizeof ", 1},
+	"sz": {"sizeof ", 1},
+	"tr": {"throw", 0},
+	"tw": {"throw ", 1},
+}
+
+// operator_name ::= many different two character encodings.
+//               ::= cv <type>
+//               ::= v <digit> <source-name>
+//
+// We need to know whether we are in an expression because it affects
+// how we handle template parameters in the type of a cast operator.
+//
+// operatorName returns the operator AST and the number of operands
+// it takes in an expression.
+func (st *state) operatorName(inExpression bool) (AST, int) {
+	if len(st.str) < 2 {
+		st.fail("missing operator code")
+	}
+	code := st.str[:2]
+	st.advance(2)
+	if code[0] == 'v' && isDigit(code[1]) {
+		// Vendor extended operator: v <digit> <source-name>.
+		name := st.sourceName()
+		return &Operator{Name: name.(*Name).Name}, int(code[1] - '0')
+	} else if code == "cv" {
+		// Push a nil on templates to indicate that template
+		// parameters will have their template filled in
+		// later.
+		if !inExpression {
+			st.templates = append(st.templates, nil)
+		}
+
+		t := st.demangleType(!inExpression)
+
+		if !inExpression {
+			st.templates = st.templates[:len(st.templates)-1]
+		}
+
+		return &Cast{To: t}, 1
+	} else if op, ok := operators[code]; ok {
+		return &Operator{Name: op.name}, op.args
+	} else {
+		st.failEarlier("unrecognized operator code", 2)
+		panic("not reached")
+	}
+}
+
+// <local-name> ::= Z <(function) encoding> E <(entity) name> [<discriminator>]
+//              ::= Z <(function) encoding> E s [<discriminator>]
+//              ::= Z <(function) encoding> E d [<parameter> number>] _ <entity name>
+//
+// localName parses an entity that is local to a function: a local
+// variable/type, a string literal, or a default-argument scope.
+func (st *state) localName() AST {
+	st.checkChar('Z')
+	fn := st.encoding(true, forLocalName)
+	if len(st.str) == 0 || st.str[0] != 'E' {
+		st.fail("expected E after local name")
+	}
+	st.advance(1)
+	if len(st.str) > 0 && st.str[0] == 's' {
+		// 's' denotes a string literal local to the function.
+		st.advance(1)
+		var n AST = &Name{Name: "string literal"}
+		n = st.discriminator(n)
+		return &Qualified{Scope: fn, Name: n, LocalName: true}
+	} else {
+		num := -1
+		if len(st.str) > 0 && st.str[0] == 'd' {
+			// Default argument scope.
+			st.advance(1)
+			num = st.compactNumber()
+		}
+		n := st.name()
+		n = st.discriminator(n)
+		if num >= 0 {
+			n = &DefaultArg{Num: num, Arg: n}
+		}
+		return &Qualified{Scope: fn, Name: n, LocalName: true}
+	}
+}
+
+// javaResource parses a Java resource special-name (gcj extension):
+// a length-prefixed string in which '$' escapes encode '/', '.',
+// and '$' itself.
+func (st *state) javaResource() AST {
+	off := st.off
+	ln := st.number()
+	if ln <= 1 {
+		st.failEarlier("java resource length less than 1", st.off-off)
+	}
+	if len(st.str) == 0 || st.str[0] != '_' {
+		st.fail("expected _ after number")
+	}
+	st.advance(1)
+	// The length includes the '_' separator just consumed.
+	ln--
+	if len(st.str) < ln {
+		st.fail("not enough characters for java resource length")
+	}
+	str := st.str[:ln]
+	final := ""
+	st.advance(ln)
+	for i := 0; i < len(str); i++ {
+		if str[i] != '$' {
+			final += string(str[i])
+		} else {
+			if len(str) <= i+1 {
+				st.failEarlier("java resource escape at end of string", 1)
+			}
+			i++
+			// Decode the escape: $S -> '/', $_ -> '.', $$ -> '$'.
+			r, ok := map[byte]string{
+				'S': "/",
+				'_': ".",
+				'$': "$",
+			}[str[i]]
+			if !ok {
+				st.failEarlier("unrecognized java resource escape", ln-i-1)
+			}
+			final += r
+		}
+	}
+	return &Special{Prefix: "java resource ", Val: &Name{Name: final}}
+}
+
+// <special-name> ::= TV <type>
+//                ::= TT <type>
+//                ::= TI <type>
+//                ::= TS <type>
+//                ::= TA <template-arg>
+//                ::= GV <(object) name>
+//                ::= T <call-offset> <(base) encoding>
+//                ::= Tc <call-offset> <call-offset> <(base) encoding>
+// Also g++ extensions:
+//                ::= TC <type> <(offset) number> _ <(base) type>
+//                ::= TF <type>
+//                ::= TJ <type>
+//                ::= GR <name>
+//                ::= GA <encoding>
+//                ::= Gr <resource name>
+//                ::= GTt <encoding>
+//                ::= GTn <encoding>
+//
+// specialName parses a special name — vtables, typeinfo, thunks,
+// guard variables, and similar compiler-generated entities — and
+// returns a Special (or Special2) node whose prefix describes the
+// entity kind.
+func (st *state) specialName() AST {
+	if st.str[0] == 'T' {
+		st.advance(1)
+		if len(st.str) == 0 {
+			st.fail("expected special name code")
+		}
+		c := st.str[0]
+		st.advance(1)
+		switch c {
+		case 'V':
+			t := st.demangleType(false)
+			return &Special{Prefix: "vtable for ", Val: t}
+		case 'T':
+			t := st.demangleType(false)
+			return &Special{Prefix: "VTT for ", Val: t}
+		case 'I':
+			t := st.demangleType(false)
+			return &Special{Prefix: "typeinfo for ", Val: t}
+		case 'S':
+			t := st.demangleType(false)
+			return &Special{Prefix: "typeinfo name for ", Val: t}
+		case 'A':
+			t := st.templateArg()
+			return &Special{Prefix: "template parameter object for ", Val: t}
+		case 'h':
+			// Non-virtual thunk; the offset is parsed but
+			// not displayed.
+			st.callOffset('h')
+			v := st.encoding(true, notForLocalName)
+			return &Special{Prefix: "non-virtual thunk to ", Val: v}
+		case 'v':
+			st.callOffset('v')
+			v := st.encoding(true, notForLocalName)
+			return &Special{Prefix: "virtual thunk to ", Val: v}
+		case 'c':
+			// Covariant return thunk carries two call offsets.
+			st.callOffset(0)
+			st.callOffset(0)
+			v := st.encoding(true, notForLocalName)
+			return &Special{Prefix: "covariant return thunk to ", Val: v}
+		case 'C':
+			derived := st.demangleType(false)
+			off := st.off
+			offset := st.number()
+			if offset < 0 {
+				st.failEarlier("expected positive offset", st.off-off)
+			}
+			if len(st.str) == 0 || st.str[0] != '_' {
+				st.fail("expected _ after number")
+			}
+			st.advance(1)
+			base := st.demangleType(false)
+			return &Special2{Prefix: "construction vtable for ", Val1: base, Middle: "-in-", Val2: derived}
+		case 'F':
+			t := st.demangleType(false)
+			return &Special{Prefix: "typeinfo fn for ", Val: t}
+		case 'J':
+			t := st.demangleType(false)
+			return &Special{Prefix: "java Class for ", Val: t}
+		case 'H':
+			n := st.name()
+			return &Special{Prefix: "TLS init function for ", Val: n}
+		case 'W':
+			n := st.name()
+			return &Special{Prefix: "TLS wrapper function for ", Val: n}
+		default:
+			st.fail("unrecognized special T name code")
+			panic("not reached")
+		}
+	} else {
+		st.checkChar('G')
+		if len(st.str) == 0 {
+			st.fail("expected special name code")
+		}
+		c := st.str[0]
+		st.advance(1)
+		switch c {
+		case 'V':
+			n := st.name()
+			return &Special{Prefix: "guard variable for ", Val: n}
+		case 'R':
+			n := st.name()
+			i := st.number()
+			return &Special{Prefix: fmt.Sprintf("reference temporary #%d for ", i), Val: n}
+		case 'A':
+			v := st.encoding(true, notForLocalName)
+			return &Special{Prefix: "hidden alias for ", Val: v}
+		case 'T':
+			if len(st.str) == 0 {
+				st.fail("expected special GT name code")
+			}
+			c := st.str[0]
+			st.advance(1)
+			v := st.encoding(true, notForLocalName)
+			switch c {
+			case 'n':
+				return &Special{Prefix: "non-transaction clone for ", Val: v}
+			default:
+				// The proposal is that different
+				// letters stand for different types
+				// of transactional cloning. Treat
+				// them all the same for now.
+				fallthrough
+			case 't':
+				return &Special{Prefix: "transaction clone for ", Val: v}
+			}
+		case 'r':
+			return st.javaResource()
+		default:
+			st.fail("unrecognized special G name code")
+			panic("not reached")
+		}
+	}
+}
+
+// <call-offset> ::= h <nv-offset> _
+//               ::= v <v-offset> _
+//
+// <nv-offset> ::= <(offset) number>
+//
+// <v-offset> ::= <(offset) number> _ <(virtual offset) number>
+//
+// The c parameter, if not 0, is a character we just read which is the
+// start of the <call-offset>.
+//
+// We don't display the offset information anywhere; callOffset only
+// validates and consumes it.
+func (st *state) callOffset(c byte) {
+	if c == 0 {
+		if len(st.str) == 0 {
+			st.fail("missing call offset")
+		}
+		c = st.str[0]
+		st.advance(1)
+	}
+	switch c {
+	case 'h':
+		st.number()
+	case 'v':
+		// A virtual offset is two numbers separated by '_'.
+		st.number()
+		if len(st.str) == 0 || st.str[0] != '_' {
+			st.fail("expected _ after number")
+		}
+		st.advance(1)
+		st.number()
+	default:
+		st.failEarlier("unrecognized call offset code", 1)
+	}
+	if len(st.str) == 0 || st.str[0] != '_' {
+		st.fail("expected _ after call offset")
+	}
+	st.advance(1)
+}
+
+// builtinTypes maps the one-letter <builtin-type> code from the
+// Itanium ABI to the demangled type name.
+var builtinTypes = map[byte]string{
+	'a': "signed char",
+	'b': "bool",
+	'c': "char",
+	'd': "double",
+	'e': "long double",
+	'f': "float",
+	'g': "__float128",
+	'h': "unsigned char",
+	'i': "int",
+	'j': "unsigned int",
+	'l': "long",
+	'm': "unsigned long",
+	'n': "__int128",
+	'o': "unsigned __int128",
+	's': "short",
+	't': "unsigned short",
+	'v': "void",
+	'w': "wchar_t",
+	'x': "long long",
+	'y': "unsigned long long",
+	'z': "...",
+}
+
+// <type> ::= <builtin-type>
+//        ::= <function-type>
+//        ::= <class-enum-type>
+//        ::= <array-type>
+//        ::= <pointer-to-member-type>
+//        ::= <template-param>
+//        ::= <template-template-param> <template-args>
+//        ::= <substitution>
+//        ::= <CV-qualifiers> <type>
+//        ::= P <type>
+//        ::= R <type>
+//        ::= O <type> (C++0x)
+//        ::= C <type>
+//        ::= G <type>
+//        ::= U <source-name> <type>
+//
+// <builtin-type> ::= various one letter codes
+//                ::= u <source-name>
+//
+// demangleType parses a <type>. isCast is true when the type is the
+// target of a cast operator; see demangleCastTemplateArgs for why
+// that changes how template arguments after a template-param are
+// handled. Most parsed types are recorded as substitution
+// candidates.
+func (st *state) demangleType(isCast bool) AST {
+	if len(st.str) == 0 {
+		st.fail("expected type")
+	}
+
+	addSubst := true
+
+	q := st.cvQualifiers()
+	if q != nil {
+		if len(st.str) == 0 {
+			st.fail("expected type")
+		}
+
+		// CV-qualifiers before a function type apply to
+		// 'this', so avoid adding the unqualified function
+		// type to the substitution list.
+		if st.str[0] == 'F' {
+			addSubst = false
+		}
+	}
+
+	var ret AST
+
+	// Use correct substitution for a template parameter.
+	// NOTE(review): sub is never assigned in this version, so the
+	// sub != nil branch below appears to be dead — confirm
+	// against upstream ianlancetaylor/demangle.
+	var sub AST
+
+	if btype, ok := builtinTypes[st.str[0]]; ok {
+		ret = &BuiltinType{Name: btype}
+		st.advance(1)
+		if q != nil {
+			ret = &TypeWithQualifiers{Base: ret, Qualifiers: q}
+			st.subs.add(ret)
+		}
+		return ret
+	}
+	c := st.str[0]
+	switch c {
+	case 'u':
+		// Vendor extended builtin type.
+		st.advance(1)
+		ret = st.sourceName()
+	case 'F':
+		ret = st.functionType()
+	case 'N', 'Z', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
+		ret = st.name()
+	case 'A':
+		ret = st.arrayType(isCast)
+	case 'M':
+		ret = st.pointerToMemberType(isCast)
+	case 'T':
+		ret = st.templateParam()
+		if len(st.str) > 0 && st.str[0] == 'I' {
+			// See the function comment to explain this.
+			if !isCast {
+				st.subs.add(ret)
+				args := st.templateArgs()
+				ret = &Template{Name: ret, Args: args}
+			} else {
+				ret = st.demangleCastTemplateArgs(ret, true)
+			}
+		}
+	case 'S':
+		// If this is a special substitution, then it
+		// is the start of <class-enum-type>.
+		var c2 byte
+		if len(st.str) > 1 {
+			c2 = st.str[1]
+		}
+		if isDigit(c2) || c2 == '_' || isUpper(c2) {
+			ret = st.substitution(false)
+			if len(st.str) == 0 || st.str[0] != 'I' {
+				addSubst = false
+			} else {
+				// See the function comment to explain this.
+				if _, ok := ret.(*TemplateParam); !ok || !isCast {
+					args := st.templateArgs()
+					ret = &Template{Name: ret, Args: args}
+				} else {
+					next := st.demangleCastTemplateArgs(ret, false)
+					if next == ret {
+						addSubst = false
+					}
+					ret = next
+				}
+			}
+		} else {
+			ret = st.name()
+			// This substitution is not itself a
+			// substitution candidate, unless template
+			// arguments were added.
+			if ret == subAST[c2] || ret == verboseAST[c2] {
+				addSubst = false
+			}
+		}
+	case 'O', 'P', 'R', 'C', 'G':
+		// Pointer, reference, rvalue reference, complex, or
+		// imaginary modifier wrapping an inner type.
+		st.advance(1)
+		t := st.demangleType(isCast)
+		switch c {
+		case 'O':
+			ret = &RvalueReferenceType{Base: t}
+		case 'P':
+			ret = &PointerType{Base: t}
+		case 'R':
+			ret = &ReferenceType{Base: t}
+		case 'C':
+			ret = &ComplexType{Base: t}
+		case 'G':
+			ret = &ImaginaryType{Base: t}
+		}
+	case 'U':
+		if len(st.str) < 2 {
+			st.fail("expected source name or unnamed type")
+		}
+		switch st.str[1] {
+		case 'l':
+			ret = st.closureTypeName()
+			addSubst = false
+		case 't':
+			ret = st.unnamedTypeName()
+			addSubst = false
+		default:
+			// Vendor extended type qualifier:
+			// U <source-name> [<template-args>] <type>.
+			st.advance(1)
+			n := st.sourceName()
+			if len(st.str) > 0 && st.str[0] == 'I' {
+				args := st.templateArgs()
+				n = &Template{Name: n, Args: args}
+			}
+			t := st.demangleType(isCast)
+			ret = &VendorQualifier{Qualifier: n, Type: t}
+		}
+	case 'D':
+		st.advance(1)
+		if len(st.str) == 0 {
+			st.fail("expected D code for type")
+		}
+		addSubst = false
+		c2 := st.str[0]
+		st.advance(1)
+		switch c2 {
+		case 'T', 't':
+			// decltype(expression)
+			ret = st.expression()
+			if len(st.str) == 0 || st.str[0] != 'E' {
+				st.fail("expected E after expression in type")
+			}
+			st.advance(1)
+			ret = &Decltype{Expr: ret}
+			addSubst = true
+
+		case 'p':
+			// Pack expansion of the following type.
+			t := st.demangleType(isCast)
+			pack := st.findArgumentPack(t)
+			ret = &PackExpansion{Base: t, Pack: pack}
+			addSubst = true
+
+		case 'a':
+			ret = &Name{Name: "auto"}
+		case 'c':
+			ret = &Name{Name: "decltype(auto)"}
+
+		case 'f':
+			ret = &BuiltinType{Name: "decimal32"}
+		case 'd':
+			ret = &BuiltinType{Name: "decimal64"}
+		case 'e':
+			ret = &BuiltinType{Name: "decimal128"}
+		case 'h':
+			ret = &BuiltinType{Name: "half"}
+		case 'u':
+			ret = &BuiltinType{Name: "char8_t"}
+		case 's':
+			ret = &BuiltinType{Name: "char16_t"}
+		case 'i':
+			ret = &BuiltinType{Name: "char32_t"}
+		case 'n':
+			ret = &BuiltinType{Name: "decltype(nullptr)"}
+
+		case 'F':
+			// Fixed-point type (embedded C extension).
+			accum := false
+			if len(st.str) > 0 && isDigit(st.str[0]) {
+				accum = true
+				// We don't care about the bits.
+				_ = st.number()
+			}
+			base := st.demangleType(isCast)
+			if len(st.str) > 0 && isDigit(st.str[0]) {
+				// We don't care about the bits.
+				st.number()
+			}
+			sat := false
+			if len(st.str) > 0 {
+				if st.str[0] == 's' {
+					sat = true
+				}
+				st.advance(1)
+			}
+			ret = &FixedType{Base: base, Accum: accum, Sat: sat}
+
+		case 'v':
+			ret = st.vectorType(isCast)
+			addSubst = true
+
+		default:
+			st.fail("unrecognized D code in type")
+		}
+
+	default:
+		st.fail("unrecognized type code")
+	}
+
+	if addSubst {
+		if sub != nil {
+			st.subs.add(sub)
+		} else {
+			st.subs.add(ret)
+		}
+	}
+
+	if q != nil {
+		if _, ok := ret.(*FunctionType); ok {
+			// Qualifiers on a function type apply to 'this'.
+			ret = &MethodWithQualifiers{Method: ret, Qualifiers: q, RefQualifier: ""}
+		} else if mwq, ok := ret.(*MethodWithQualifiers); ok {
+			// Merge adjacent qualifiers. This case
+			// happens with a function with a trailing
+			// ref-qualifier.
+			mwq.Qualifiers = mergeQualifiers(q, mwq.Qualifiers)
+		} else {
+			// Merge adjacent qualifiers. This case
+			// happens with multi-dimensional array types.
+			if qsub, ok := ret.(*TypeWithQualifiers); ok {
+				q = mergeQualifiers(q, qsub.Qualifiers)
+				ret = qsub.Base
+			}
+			ret = &TypeWithQualifiers{Base: ret, Qualifiers: q}
+		}
+		st.subs.add(ret)
+	}
+
+	return ret
+}
+
+// demangleCastTemplateArgs is for a rather hideous parse. When we
+// see a template-param followed by a template-args, we need to decide
+// whether we have a template-param or a template-template-param.
+// Normally it is template-template-param, meaning that we pick up the
+// template arguments here. But, if we are parsing the type for a
+// cast operator, then the only way this can be template-template-param
+// is if there is another set of template-args immediately after this
+// set. That would look like this:
+//
+// <nested-name>
+// -> <template-prefix> <template-args>
+// -> <prefix> <template-unqualified-name> <template-args>
+// -> <unqualified-name> <template-unqualified-name> <template-args>
+// -> <source-name> <template-unqualified-name> <template-args>
+// -> <source-name> <operator-name> <template-args>
+// -> <source-name> cv <type> <template-args>
+// -> <source-name> cv <template-template-param> <template-args> <template-args>
+//
+// Otherwise, we have this derivation:
+//
+// <nested-name>
+// -> <template-prefix> <template-args>
+// -> <prefix> <template-unqualified-name> <template-args>
+// -> <unqualified-name> <template-unqualified-name> <template-args>
+// -> <source-name> <template-unqualified-name> <template-args>
+// -> <source-name> <operator-name> <template-args>
+// -> <source-name> cv <type> <template-args>
+// -> <source-name> cv <template-param> <template-args>
+//
+// in which the template-args are actually part of the prefix. For
+// the special case where this arises, demangleType is called with
+// isCast as true. This function is then responsible for checking
+// whether we see <template-param> <template-args> but there is not
+// another following <template-args>. In that case, we reset the
+// parse and just return the <template-param>.
+func (st *state) demangleCastTemplateArgs(tp AST, addSubst bool) AST {
+	// Snapshot the parser so we can backtrack if the speculative
+	// template-args parse turns out to belong to the prefix.
+	save := st.copy()
+
+	var args []AST
+	failed := false
+	func() {
+		// Convert a parse failure (demangleErr panic) into a
+		// backtrack instead of propagating it.
+		defer func() {
+			if r := recover(); r != nil {
+				if _, ok := r.(demangleErr); ok {
+					failed = true
+				} else {
+					panic(r)
+				}
+			}
+		}()
+
+		args = st.templateArgs()
+	}()
+
+	if !failed && len(st.str) > 0 && st.str[0] == 'I' {
+		if addSubst {
+			st.subs.add(tp)
+		}
+		return &Template{Name: tp, Args: args}
+	}
+	// Reset back to before we started reading the template arguments.
+	// They will be read again by st.prefix.
+	*st = *save
+	return tp
+}
+
+// mergeQualifiers merges two qualifier lists into one, dropping
+// duplicate expression-free qualifiers. It mutates and returns
+// q1AST (when non-nil).
+func mergeQualifiers(q1AST, q2AST AST) AST {
+	if q1AST == nil {
+		return q2AST
+	}
+	if q2AST == nil {
+		return q1AST
+	}
+	q1 := q1AST.(*Qualifiers)
+	// Track names already present in q1 so they are not repeated.
+	m := make(map[string]bool)
+	for _, qualAST := range q1.Qualifiers {
+		qual := qualAST.(*Qualifier)
+		if len(qual.Exprs) == 0 {
+			m[qual.Name] = true
+		}
+	}
+	rq := q1.Qualifiers
+	for _, qualAST := range q2AST.(*Qualifiers).Qualifiers {
+		qual := qualAST.(*Qualifier)
+		if len(qual.Exprs) > 0 {
+			// Qualifiers with expressions (e.g. noexcept(E))
+			// are never deduplicated.
+			rq = append(rq, qualAST)
+		} else if !m[qual.Name] {
+			rq = append(rq, qualAST)
+			m[qual.Name] = true
+		}
+	}
+	q1.Qualifiers = rq
+	return q1
+}
+
+// qualifiers maps from the character used in the mangled name to the
+// string to print.
+var qualifiers = map[byte]string{
+	'r': "restrict",
+	'V': "volatile",
+	'K': "const",
+}
+
+// <CV-qualifiers> ::= [r] [V] [K]
+//
+// cvQualifiers parses CV-qualifiers, plus the D-prefixed extensions
+// (transaction_safe, noexcept, computed noexcept, and throw specs).
+// Returns nil when no qualifiers are present. Qualifiers are
+// prepended so they print in the reverse of their mangled order.
+func (st *state) cvQualifiers() AST {
+	var q []AST
+qualLoop:
+	for len(st.str) > 0 {
+		if qv, ok := qualifiers[st.str[0]]; ok {
+			qual := &Qualifier{Name: qv}
+			q = append([]AST{qual}, q...)
+			st.advance(1)
+		} else if len(st.str) > 1 && st.str[0] == 'D' {
+			var qual AST
+			switch st.str[1] {
+			case 'x':
+				qual = &Qualifier{Name: "transaction_safe"}
+				st.advance(2)
+			case 'o':
+				qual = &Qualifier{Name: "noexcept"}
+				st.advance(2)
+			case 'O':
+				// Computed noexcept: DO <expression> E.
+				st.advance(2)
+				expr := st.expression()
+				if len(st.str) == 0 || st.str[0] != 'E' {
+					st.fail("expected E after computed noexcept expression")
+				}
+				st.advance(1)
+				qual = &Qualifier{Name: "noexcept", Exprs: []AST{expr}}
+			case 'w':
+				// Dynamic exception spec: Dw <type>+ E.
+				st.advance(2)
+				parmlist := st.parmlist()
+				if len(st.str) == 0 || st.str[0] != 'E' {
+					st.fail("expected E after throw parameter list")
+				}
+				st.advance(1)
+				qual = &Qualifier{Name: "throw", Exprs: parmlist}
+			default:
+				break qualLoop
+			}
+			q = append([]AST{qual}, q...)
+		} else {
+			break
+		}
+	}
+	if len(q) == 0 {
+		return nil
+	}
+	return &Qualifiers{Qualifiers: q}
+}
+
+// <ref-qualifier> ::= R
+//                 ::= O
+//
+// refQualifier parses an optional ref-qualifier, returning "&", "&&",
+// or "" when none is present.
+func (st *state) refQualifier() string {
+	if len(st.str) > 0 {
+		switch st.str[0] {
+		case 'R':
+			st.advance(1)
+			return "&"
+		case 'O':
+			st.advance(1)
+			return "&&"
+		}
+	}
+	return ""
+}
+
+// <type>+
+//
+// parmlist parses a function parameter type list, stopping at 'E',
+// '.', or a trailing ref-qualifier. A single "void" parameter is
+// omitted from the result.
+func (st *state) parmlist() []AST {
+	var ret []AST
+	for {
+		if len(st.str) < 1 {
+			break
+		}
+		if st.str[0] == 'E' || st.str[0] == '.' {
+			break
+		}
+		if (st.str[0] == 'R' || st.str[0] == 'O') && len(st.str) > 1 && st.str[1] == 'E' {
+			// This is a function ref-qualifier.
+			break
+		}
+		ptype := st.demangleType(false)
+		ret = append(ret, ptype)
+	}
+
+	// There should always be at least one type. A function that
+	// takes no arguments will have a single parameter type
+	// "void".
+	if len(ret) == 0 {
+		st.fail("expected at least one type in type list")
+	}
+
+	// Omit a single parameter type void.
+	if len(ret) == 1 {
+		if bt, ok := ret[0].(*BuiltinType); ok && bt.Name == "void" {
+			ret = nil
+		}
+	}
+
+	return ret
+}
+
+// <function-type> ::= F [Y] <bare-function-type> [<ref-qualifier>] E
+//
+// functionType parses a function type. A trailing ref-qualifier
+// wraps the result in a MethodWithQualifiers.
+func (st *state) functionType() AST {
+	st.checkChar('F')
+	if len(st.str) > 0 && st.str[0] == 'Y' {
+		// Function has C linkage. We don't print this.
+		st.advance(1)
+	}
+	ret := st.bareFunctionType(true)
+	r := st.refQualifier()
+	if r != "" {
+		ret = &MethodWithQualifiers{Method: ret, Qualifiers: nil, RefQualifier: r}
+	}
+	if len(st.str) == 0 || st.str[0] != 'E' {
+		st.fail("expected E after function type")
+	}
+	st.advance(1)
+	return ret
+}
+
+// <bare-function-type> ::= [J]<type>+
+//
+// bareFunctionType parses a return type (when hasReturnType, or when
+// a leading 'J' forces one) followed by the parameter list.
+func (st *state) bareFunctionType(hasReturnType bool) AST {
+	if len(st.str) > 0 && st.str[0] == 'J' {
+		hasReturnType = true
+		st.advance(1)
+	}
+	var returnType AST
+	if hasReturnType {
+		returnType = st.demangleType(false)
+	}
+	types := st.parmlist()
+	return &FunctionType{Return: returnType, Args: types}
+}
+
+// <array-type> ::= A <(positive dimension) number> _ <(element) type>
+//              ::= A [<(dimension) expression>] _ <(element) type>
+//
+// arrayType parses an array type whose dimension is empty, a numeric
+// literal, or an expression.
+func (st *state) arrayType(isCast bool) AST {
+	st.checkChar('A')
+
+	if len(st.str) == 0 {
+		st.fail("missing array dimension")
+	}
+
+	var dim AST
+	if st.str[0] == '_' {
+		// No dimension: an incomplete array type.
+		dim = &Name{Name: ""}
+	} else if isDigit(st.str[0]) {
+		// Keep the literal digits as the dimension text.
+		i := 1
+		for len(st.str) > i && isDigit(st.str[i]) {
+			i++
+		}
+		dim = &Name{Name: st.str[:i]}
+		st.advance(i)
+	} else {
+		dim = st.expression()
+	}
+
+	if len(st.str) == 0 || st.str[0] != '_' {
+		st.fail("expected _ after dimension")
+	}
+	st.advance(1)
+
+	t := st.demangleType(isCast)
+
+	arr := &ArrayType{Dimension: dim, Element: t}
+
+	// Qualifiers on the element of an array type go on the whole
+	// array type.
+	if q, ok := arr.Element.(*TypeWithQualifiers); ok {
+		return &TypeWithQualifiers{Base: &ArrayType{Dimension: dim, Element: q.Base}, Qualifiers: q.Qualifiers}
+	}
+
+	return arr
+}
+
+// <vector-type> ::= Dv <number> _ <type>
+//               ::= Dv _ <expression> _ <type>
+//
+// vectorType parses a vector type (GCC/clang extension). The "Dv"
+// prefix has already been consumed by the caller.
+func (st *state) vectorType(isCast bool) AST {
+	if len(st.str) == 0 {
+		st.fail("expected vector dimension")
+	}
+
+	var dim AST
+	if st.str[0] == '_' {
+		// Dimension given as an expression.
+		st.advance(1)
+		dim = st.expression()
+	} else {
+		num := st.number()
+		dim = &Name{Name: fmt.Sprintf("%d", num)}
+	}
+
+	if len(st.str) == 0 || st.str[0] != '_' {
+		st.fail("expected _ after vector dimension")
+	}
+	st.advance(1)
+
+	t := st.demangleType(isCast)
+
+	return &VectorType{Dimension: dim, Base: t}
+}
+
+// <pointer-to-member-type> ::= M <(class) type> <(member) type>
+func (st *state) pointerToMemberType(isCast bool) AST {
+ st.checkChar('M')
+ cl := st.demangleType(false)
+
+ // The ABI says, "The type of a non-static member function is
+ // considered to be different, for the purposes of
+ // substitution, from the type of a namespace-scope or static
+ // member function whose type appears similar. The types of
+ // two non-static member functions are considered to be
+ // different, for the purposes of substitution, if the
+ // functions are members of different classes. In other words,
+ // for the purposes of substitution, the class of which the
+ // function is a member is considered part of the type of
+ // function."
+ //
+ // For a pointer to member function, this call to demangleType
+ // will end up adding a (possibly qualified) non-member
+ // function type to the substitution table, which is not
+ // correct; however, the member function type will never be
+ // used in a substitution, so putting the wrong type in the
+ // substitution table is harmless.
+ mem := st.demangleType(isCast)
+ return &PtrMem{Class: cl, Member: mem}
+}
+
+// <non-negative number> _ */
+func (st *state) compactNumber() int {
+ if len(st.str) == 0 {
+ st.fail("missing index")
+ }
+ if st.str[0] == '_' {
+ st.advance(1)
+ return 0
+ } else if st.str[0] == 'n' {
+ st.fail("unexpected negative number")
+ }
+ n := st.number()
+ if len(st.str) == 0 || st.str[0] != '_' {
+ st.fail("missing underscore after number")
+ }
+ st.advance(1)
+ return n + 1
+}
+
// <template-param> ::= T_
//                  ::= T <(parameter-2 non-negative) number> _
//
// When a template parameter is a substitution candidate, any
// reference to that substitution refers to the template parameter
// with the same index in the currently active template, not to
// whatever the template parameter would be expanded to here. We sort
// this out in substitution and simplify.
func (st *state) templateParam() AST {
	if len(st.templates) == 0 && st.inLambda == 0 {
		st.fail("template parameter not in scope of template")
	}
	// Remember the starting position for error offsets below.
	off := st.off

	st.checkChar('T')
	n := st.compactNumber()

	if st.inLambda > 0 {
		// g++ mangles lambda auto params as template params.
		// Apparently we can't encounter a template within a lambda.
		// See https://gcc.gnu.org/PR78252.
		return &LambdaAuto{Index: n}
	}

	// The index refers to the innermost enclosing template.
	template := st.templates[len(st.templates)-1]

	if template == nil {
		// We are parsing a cast operator. If the cast is
		// itself a template, then this is a forward
		// reference. Fill it in later.
		return &TemplateParam{Index: n, Template: nil}
	}

	if n >= len(template.Args) {
		st.failEarlier(fmt.Sprintf("template index out of range (%d >= %d)", n, len(template.Args)), st.off-off)
	}

	return &TemplateParam{Index: n, Template: template}
}
+
// setTemplate sets the Template field of any TemplateParam's in a.
// This handles the forward referencing template parameters found in
// cast operators.
func (st *state) setTemplate(a AST, tmpl *Template) {
	var seen []AST
	a.Traverse(func(a AST) bool {
		switch a := a.(type) {
		case *TemplateParam:
			if a.Template != nil {
				// Already resolved; a non-nil tmpl here
				// would mean two competing templates.
				if tmpl != nil {
					st.fail("duplicate template parameters")
				}
				return false
			}
			if tmpl == nil {
				st.fail("cast template parameter not in scope of template")
			}
			if a.Index >= len(tmpl.Args) {
				st.fail(fmt.Sprintf("cast template index out of range (%d >= %d)", a.Index, len(tmpl.Args)))
			}
			a.Template = tmpl
			return false
		case *Closure:
			// There are no template params in closure types.
			// https://gcc.gnu.org/PR78252.
			return false
		default:
			// The AST may share nodes; don't revisit one we
			// have already handled.
			for _, v := range seen {
				if v == a {
					return false
				}
			}
			seen = append(seen, a)
			return true
		}
	})
}
+
+// clearTemplateArgs gives an error for any unset Template field in
+// args. This handles erroneous cases where a cast operator with a
+// forward referenced template is in the scope of another cast
+// operator.
+func (st *state) clearTemplateArgs(args []AST) {
+ for _, a := range args {
+ st.setTemplate(a, nil)
+ }
+}
+
+// <template-args> ::= I <template-arg>+ E
+func (st *state) templateArgs() []AST {
+ if len(st.str) == 0 || (st.str[0] != 'I' && st.str[0] != 'J') {
+ panic("internal error")
+ }
+ st.advance(1)
+
+ var ret []AST
+ for len(st.str) == 0 || st.str[0] != 'E' {
+ arg := st.templateArg()
+ ret = append(ret, arg)
+ }
+ st.advance(1)
+ return ret
+}
+
+// <template-arg> ::= <type>
+// ::= X <expression> E
+// ::= <expr-primary>
+func (st *state) templateArg() AST {
+ if len(st.str) == 0 {
+ st.fail("missing template argument")
+ }
+ switch st.str[0] {
+ case 'X':
+ st.advance(1)
+ expr := st.expression()
+ if len(st.str) == 0 || st.str[0] != 'E' {
+ st.fail("missing end of expression")
+ }
+ st.advance(1)
+ return expr
+
+ case 'L':
+ return st.exprPrimary()
+
+ case 'I', 'J':
+ args := st.templateArgs()
+ return &ArgumentPack{Args: args}
+
+ default:
+ return st.demangleType(false)
+ }
+}
+
+// exprList parses a sequence of expressions up to a terminating character.
+func (st *state) exprList(stop byte) AST {
+ if len(st.str) > 0 && st.str[0] == stop {
+ st.advance(1)
+ return &ExprList{Exprs: nil}
+ }
+
+ var exprs []AST
+ for {
+ e := st.expression()
+ exprs = append(exprs, e)
+ if len(st.str) > 0 && st.str[0] == stop {
+ st.advance(1)
+ break
+ }
+ }
+ return &ExprList{Exprs: exprs}
+}
+
// <expression> ::= <(unary) operator-name> <expression>
//              ::= <(binary) operator-name> <expression> <expression>
//              ::= <(trinary) operator-name> <expression> <expression> <expression>
//              ::= pp_ <expression>
//              ::= mm_ <expression>
//              ::= cl <expression>+ E
//              ::= cp <expression>+ E
//              ::= cv <type> <expression>
//              ::= cv <type> _ <expression>* E
//              ::= tl <type> <braced-expression>* E
//              ::= il <braced-expression>* E
//              ::= [gs] nw <expression>* _ <type> E
//              ::= [gs] nw <expression>* _ <type> <initializer>
//              ::= [gs] na <expression>* _ <type> E
//              ::= [gs] na <expression>* _ <type> <initializer>
//              ::= [gs] dl <expression>
//              ::= [gs] da <expression>
//              ::= dc <type> <expression>
//              ::= sc <type> <expression>
//              ::= cc <type> <expression>
//              ::= rc <type> <expression>
//              ::= ti <type>
//              ::= te <expression>
//              ::= st <type>
//              ::= sz <expression>
//              ::= at <type>
//              ::= az <expression>
//              ::= nx <expression>
//              ::= <template-param>
//              ::= <function-param>
//              ::= dt <expression> <unresolved-name>
//              ::= pt <expression> <unresolved-name>
//              ::= ds <expression> <expression>
//              ::= sZ <template-param>
//              ::= sZ <function-param>
//              ::= sP <template-arg>* E
//              ::= sp <expression>
//              ::= fl <binary operator-name> <expression>
//              ::= fr <binary operator-name> <expression>
//              ::= fL <binary operator-name> <expression> <expression>
//              ::= fR <binary operator-name> <expression> <expression>
//              ::= tw <expression>
//              ::= tr
//              ::= <unresolved-name>
//              ::= <expr-primary>
//
// <function-param> ::= fp <CV-qualifiers> _
//                  ::= fp <CV-qualifiers> <number>
//                  ::= fL <number> p <CV-qualifiers> _
//                  ::= fL <number> p <CV-qualifiers> <number>
//                  ::= fpT
//
// <braced-expression> ::= <expression>
//                     ::= di <field source-name> <braced-expression>
//                     ::= dx <index expression> <braced-expression>
//                     ::= dX <range begin expression> <range end expression> <braced-expression>
//
func (st *state) expression() AST {
	if len(st.str) == 0 {
		st.fail("expected expression")
	}
	if st.str[0] == 'L' {
		// <expr-primary>
		return st.exprPrimary()
	} else if st.str[0] == 'T' {
		// <template-param>
		return st.templateParam()
	} else if st.str[0] == 's' && len(st.str) > 1 && st.str[1] == 'r' {
		// sr: an <unresolved-name>.
		return st.unresolvedName()
	} else if st.str[0] == 's' && len(st.str) > 1 && st.str[1] == 'p' {
		// sp <expression>: a pack expansion.
		st.advance(2)
		e := st.expression()
		pack := st.findArgumentPack(e)
		return &PackExpansion{Base: e, Pack: pack}
	} else if st.str[0] == 's' && len(st.str) > 1 && st.str[1] == 'Z' {
		// sZ: must resolve to an argument pack.
		st.advance(2)
		off := st.off
		e := st.expression()
		ap := st.findArgumentPack(e)
		if ap == nil {
			st.failEarlier("missing argument pack", st.off-off)
		}
		return &SizeofPack{Pack: ap}
	} else if st.str[0] == 's' && len(st.str) > 1 && st.str[1] == 'P' {
		// sP <template-arg>* E
		st.advance(2)
		var args []AST
		for len(st.str) == 0 || st.str[0] != 'E' {
			arg := st.templateArg()
			args = append(args, arg)
		}
		st.advance(1)
		return &SizeofArgs{Args: args}
	} else if st.str[0] == 'f' && len(st.str) > 1 && st.str[1] == 'p' {
		// <function-param>
		st.advance(2)
		if len(st.str) > 0 && st.str[0] == 'T' {
			// fpT: parameter index 0.
			st.advance(1)
			return &FunctionParam{Index: 0}
		} else {
			// We can see qualifiers here, but we don't
			// include them in the demangled string.
			st.cvQualifiers()
			index := st.compactNumber()
			return &FunctionParam{Index: index + 1}
		}
	} else if st.str[0] == 'f' && len(st.str) > 2 && st.str[1] == 'L' && isDigit(st.str[2]) {
		// fL <number> p <CV-qualifiers> ...
		st.advance(2)
		// We don't include the scope count in the demangled string.
		st.number()
		if len(st.str) == 0 || st.str[0] != 'p' {
			st.fail("expected p after function parameter scope count")
		}
		st.advance(1)
		// We can see qualifiers here, but we don't include them
		// in the demangled string.
		st.cvQualifiers()
		index := st.compactNumber()
		return &FunctionParam{Index: index + 1}
	} else if isDigit(st.str[0]) || (st.str[0] == 'o' && len(st.str) > 1 && st.str[1] == 'n') {
		// A name, possibly with template arguments.
		if st.str[0] == 'o' {
			// Skip operator function ID.
			st.advance(2)
		}
		n, _ := st.unqualifiedName()
		if len(st.str) > 0 && st.str[0] == 'I' {
			args := st.templateArgs()
			n = &Template{Name: n, Args: args}
		}
		return n
	} else if (st.str[0] == 'i' || st.str[0] == 't') && len(st.str) > 1 && st.str[1] == 'l' {
		// Brace-enclosed initializer list; "tl" carries a type.
		c := st.str[0]
		st.advance(2)
		var t AST
		if c == 't' {
			t = st.demangleType(false)
		}
		exprs := st.exprList('E')
		return &InitializerList{Type: t, Exprs: exprs}
	} else if st.str[0] == 's' && len(st.str) > 1 && st.str[1] == 't' {
		// st <type>: sizeof applied to a type.
		o, _ := st.operatorName(true)
		t := st.demangleType(false)
		return &Unary{Op: o, Expr: t, Suffix: false, SizeofType: true}
	} else {
		// The remaining cases are an operator code followed by
		// its operands; the arity comes from operatorName.
		if len(st.str) < 2 {
			st.fail("missing operator code")
		}
		code := st.str[:2]
		o, args := st.operatorName(true)
		switch args {
		case 0:
			return &Nullary{Op: o}

		case 1:
			// pp/mm without a leading _ are the suffix forms.
			suffix := false
			if code == "pp" || code == "mm" {
				if len(st.str) > 0 && st.str[0] == '_' {
					st.advance(1)
				} else {
					suffix = true
				}
			}
			var operand AST
			if _, ok := o.(*Cast); ok && len(st.str) > 0 && st.str[0] == '_' {
				// cv <type> _ <expression>* E
				st.advance(1)
				operand = st.exprList('E')
			} else {
				operand = st.expression()
			}
			return &Unary{Op: o, Expr: operand, Suffix: suffix, SizeofType: false}

		case 2:
			var left, right AST
			if code == "sc" || code == "dc" || code == "cc" || code == "rc" {
				// These casts take a type as the first operand.
				left = st.demangleType(false)
			} else if code[0] == 'f' {
				// A fold over a single expression.
				left, _ = st.operatorName(true)
				right = st.expression()
				return &Fold{Left: code[1] == 'l', Op: left, Arg1: right, Arg2: nil}
			} else if code == "di" {
				// di <field source-name> <braced-expression>
				left, _ = st.unqualifiedName()
			} else {
				left = st.expression()
			}
			if code == "cl" || code == "cp" {
				// The arguments are an expression list.
				right = st.exprList('E')
			} else if code == "dt" || code == "pt" {
				// dt/pt take an <unresolved-name> on the right.
				right = st.unresolvedName()
				if len(st.str) > 0 && st.str[0] == 'I' {
					args := st.templateArgs()
					right = &Template{Name: right, Args: args}
				}
			} else {
				right = st.expression()
			}
			return &Binary{Op: o, Left: left, Right: right}

		case 3:
			if code[0] == 'n' {
				// nw/na: new expressions.
				if code[1] != 'w' && code[1] != 'a' {
					panic("internal error")
				}
				place := st.exprList('_')
				if place.(*ExprList).Exprs == nil {
					place = nil
				}
				t := st.demangleType(false)
				var ini AST
				if len(st.str) > 0 && st.str[0] == 'E' {
					st.advance(1)
				} else if len(st.str) > 1 && st.str[0] == 'p' && st.str[1] == 'i' {
					// Parenthesized initializer.
					st.advance(2)
					ini = st.exprList('E')
				} else if len(st.str) > 1 && st.str[0] == 'i' && st.str[1] == 'l' {
					// Initializer list.
					ini = st.expression()
				} else {
					st.fail("unrecognized new initializer")
				}
				return &New{Op: o, Place: place, Type: t, Init: ini}
			} else if code[0] == 'f' {
				// A fold over two expressions.
				first, _ := st.operatorName(true)
				second := st.expression()
				third := st.expression()
				return &Fold{Left: code[1] == 'L', Op: first, Arg1: second, Arg2: third}
			} else {
				first := st.expression()
				second := st.expression()
				third := st.expression()
				return &Trinary{Op: o, First: first, Second: second, Third: third}
			}

		default:
			st.fail(fmt.Sprintf("unsupported number of operator arguments: %d", args))
			panic("not reached")
		}
	}
}
+
// <unresolved-name> ::= [gs] <base-unresolved-name>
//                   ::= sr <unresolved-type> <base-unresolved-name>
//                   ::= srN <unresolved-type> <unresolved-qualifier-level>+ E <base-unresolved-name>
//                   ::= [gs] sr <unresolved-qualifier-level>+ E <base-unresolved-name>
func (st *state) unresolvedName() AST {
	if len(st.str) >= 2 && st.str[:2] == "gs" {
		// A global-scope (::) qualified name.
		st.advance(2)
		n := st.unresolvedName()
		return &Unary{
			Op:         &Operator{Name: "::"},
			Expr:       n,
			Suffix:     false,
			SizeofType: false,
		}
	} else if len(st.str) >= 2 && st.str[:2] == "sr" {
		st.advance(2)
		if len(st.str) == 0 {
			st.fail("expected unresolved type")
		}
		switch st.str[0] {
		case 'T', 'D', 'S':
			// sr <unresolved-type> <base-unresolved-name>
			t := st.demangleType(false)
			n := st.baseUnresolvedName()
			n = &Qualified{Scope: t, Name: n, LocalName: false}
			if len(st.str) > 0 && st.str[0] == 'I' {
				args := st.templateArgs()
				n = &Template{Name: n, Args: args}
				st.subs.add(n)
			}
			return n
		default:
			// The srN and [gs] sr qualifier-level forms.
			var s AST
			if st.str[0] == 'N' {
				st.advance(1)
				s = st.demangleType(false)
			}
			for len(st.str) == 0 || st.str[0] != 'E' {
				// GCC does not seem to follow the ABI here.
				// It can emit type/name without an 'E'.
				if s != nil && len(st.str) > 0 && !isDigit(st.str[0]) {
					if q, ok := s.(*Qualified); ok {
						a := q.Scope
						if t, ok := a.(*Template); ok {
							st.subs.add(t.Name)
							st.subs.add(t)
						} else {
							st.subs.add(a)
						}
						return s
					}
				}
				n := st.sourceName()
				if len(st.str) > 0 && st.str[0] == 'I' {
					st.subs.add(n)
					args := st.templateArgs()
					n = &Template{Name: n, Args: args}
				}
				if s == nil {
					s = n
				} else {
					s = &Qualified{Scope: s, Name: n, LocalName: false}
				}
				// Each qualifier level is a substitution
				// candidate.
				st.subs.add(s)
			}
			if s == nil {
				st.fail("missing scope in unresolved name")
			}
			st.advance(1)
			n := st.baseUnresolvedName()
			return &Qualified{Scope: s, Name: n, LocalName: false}
		}
	} else {
		return st.baseUnresolvedName()
	}
}
+
// <base-unresolved-name> ::= <simple-id>
//                        ::= on <operator-name>
//                        ::= on <operator-name> <template-args>
//                        ::= dn <destructor-name>
//
// <simple-id> ::= <source-name> [ <template-args> ]
func (st *state) baseUnresolvedName() AST {
	var n AST
	if len(st.str) >= 2 && st.str[:2] == "on" {
		// on <operator-name>
		st.advance(2)
		n, _ = st.operatorName(true)
	} else if len(st.str) >= 2 && st.str[:2] == "dn" {
		// dn <destructor-name>: either a source name or a type.
		st.advance(2)
		if len(st.str) > 0 && isDigit(st.str[0]) {
			n = st.sourceName()
		} else {
			n = st.demangleType(false)
		}
		n = &Destructor{Name: n}
	} else if len(st.str) > 0 && isDigit(st.str[0]) {
		n = st.sourceName()
	} else {
		// GCC seems to not follow the ABI here: it can have
		// an operator name without on.
		// See https://gcc.gnu.org/PR70182.
		n, _ = st.operatorName(true)
	}
	// Optional <template-args> complete a <simple-id>.
	if len(st.str) > 0 && st.str[0] == 'I' {
		args := st.templateArgs()
		n = &Template{Name: n, Args: args}
	}
	return n
}
+
// <expr-primary> ::= L <type> <(value) number> E
//                ::= L <type> <(value) float> E
//                ::= L <mangled-name> E
func (st *state) exprPrimary() AST {
	st.checkChar('L')
	if len(st.str) == 0 {
		st.fail("expected primary expression")

	}

	// Check for 'Z' here because g++ incorrectly omitted the
	// underscore until -fabi-version=3.
	var ret AST
	if st.str[0] == '_' || st.str[0] == 'Z' {
		// L <mangled-name> E: a reference to an external name.
		if st.str[0] == '_' {
			st.advance(1)
		}
		if len(st.str) == 0 || st.str[0] != 'Z' {
			st.fail("expected mangled name")
		}
		st.advance(1)
		ret = st.encoding(true, notForLocalName)
	} else {
		// A literal: its type followed by its value.
		t := st.demangleType(false)

		// A leading 'n' marks a negative value.
		neg := false
		if len(st.str) > 0 && st.str[0] == 'n' {
			neg = true
			st.advance(1)
		}
		if len(st.str) > 0 && st.str[0] == 'E' {
			if bt, ok := t.(*BuiltinType); ok && bt.Name == "decltype(nullptr)" {
				// A nullptr should not have a value.
				// We accept one if present because GCC
				// used to generate one.
				// https://gcc.gnu.org/PR91979.
			} else {
				st.fail("missing literal value")
			}
		}
		// The value is everything up to the closing 'E'.
		i := 0
		for len(st.str) > i && st.str[i] != 'E' {
			i++
		}
		val := st.str[:i]
		st.advance(i)
		ret = &Literal{Type: t, Val: val, Neg: neg}
	}
	if len(st.str) == 0 || st.str[0] != 'E' {
		st.fail("expected E after literal")
	}
	st.advance(1)
	return ret
}
+
+// <discriminator> ::= _ <(non-negative) number> (when number < 10)
+// __ <(non-negative) number> _ (when number >= 10)
+func (st *state) discriminator(a AST) AST {
+ if len(st.str) == 0 || st.str[0] != '_' {
+ return a
+ }
+ off := st.off
+ st.advance(1)
+ trailingUnderscore := false
+ if len(st.str) > 0 && st.str[0] == '_' {
+ st.advance(1)
+ trailingUnderscore = true
+ }
+ d := st.number()
+ if d < 0 {
+ st.failEarlier("invalid negative discriminator", st.off-off)
+ }
+ if trailingUnderscore && d >= 10 {
+ if len(st.str) == 0 || st.str[0] != '_' {
+ st.fail("expected _ after discriminator >= 10")
+ }
+ st.advance(1)
+ }
+ // We don't currently print out the discriminator, so we don't
+ // save it.
+ return a
+}
+
+// <closure-type-name> ::= Ul <lambda-sig> E [ <nonnegative number> ] _
+func (st *state) closureTypeName() AST {
+ st.checkChar('U')
+ st.checkChar('l')
+ st.inLambda++
+ types := st.parmlist()
+ st.inLambda--
+ if len(st.str) == 0 || st.str[0] != 'E' {
+ st.fail("expected E after closure type name")
+ }
+ st.advance(1)
+ num := st.compactNumber()
+ return &Closure{Types: types, Num: num}
+}
+
+// <unnamed-type-name> ::= Ut [ <nonnegative number> ] _
+func (st *state) unnamedTypeName() AST {
+ st.checkChar('U')
+ st.checkChar('t')
+ num := st.compactNumber()
+ ret := &UnnamedType{Num: num}
+ st.subs.add(ret)
+ return ret
+}
+
+// Recognize a clone suffix. These are not part of the mangling API,
+// but are added by GCC when cloning functions.
+func (st *state) cloneSuffix(a AST) AST {
+ i := 0
+ if len(st.str) > 1 && st.str[0] == '.' && (isLower(st.str[1]) || st.str[1] == '_') {
+ i += 2
+ for len(st.str) > i && (isLower(st.str[i]) || st.str[i] == '_') {
+ i++
+ }
+ }
+ for len(st.str) > i+1 && st.str[i] == '.' && isDigit(st.str[i+1]) {
+ i += 2
+ for len(st.str) > i && isDigit(st.str[i]) {
+ i++
+ }
+ }
+ suffix := st.str[:i]
+ st.advance(i)
+ return &Clone{Base: a, Suffix: suffix}
+}
+
// substitutions is the list of substitution candidates that may
// appear later in the string. Candidates are referenced by their
// zero-based position in this list.
type substitutions []AST

// add adds a new substitution candidate.
func (subs *substitutions) add(a AST) {
	*subs = append(*subs, a)
}
+
// subAST maps standard substitution codes to the corresponding AST.
// These are the short forms, used when the Verbose option is not set;
// see verboseAST for the expanded forms.
var subAST = map[byte]AST{
	't': &Name{Name: "std"},
	'a': &Qualified{Scope: &Name{Name: "std"}, Name: &Name{Name: "allocator"}},
	'b': &Qualified{Scope: &Name{Name: "std"}, Name: &Name{Name: "basic_string"}},
	's': &Qualified{Scope: &Name{Name: "std"}, Name: &Name{Name: "string"}},
	'i': &Qualified{Scope: &Name{Name: "std"}, Name: &Name{Name: "istream"}},
	'o': &Qualified{Scope: &Name{Name: "std"}, Name: &Name{Name: "ostream"}},
	'd': &Qualified{Scope: &Name{Name: "std"}, Name: &Name{Name: "iostream"}},
}
+
// verboseAST maps standard substitution codes to the long form of the
// corresponding AST. We use this when the Verbose option is used, to
// match the standard demangler. The 't', 'a', and 'b' entries are the
// same as in subAST; only the stream and string entries expand.
var verboseAST = map[byte]AST{
	't': &Name{Name: "std"},
	'a': &Qualified{Scope: &Name{Name: "std"}, Name: &Name{Name: "allocator"}},
	'b': &Qualified{Scope: &Name{Name: "std"}, Name: &Name{Name: "basic_string"}},

	// std::basic_string<char, std::char_traits<char>, std::allocator<char> >
	's': &Template{
		Name: &Qualified{Scope: &Name{Name: "std"}, Name: &Name{Name: "basic_string"}},
		Args: []AST{
			&BuiltinType{Name: "char"},
			&Template{
				Name: &Qualified{Scope: &Name{Name: "std"}, Name: &Name{Name: "char_traits"}},
				Args: []AST{&BuiltinType{Name: "char"}}},
			&Template{
				Name: &Qualified{Scope: &Name{Name: "std"}, Name: &Name{Name: "allocator"}},
				Args: []AST{&BuiltinType{Name: "char"}}}}},
	// std::basic_istream<char, std::char_traits<char> >
	'i': &Template{
		Name: &Qualified{Scope: &Name{Name: "std"}, Name: &Name{Name: "basic_istream"}},
		Args: []AST{
			&BuiltinType{Name: "char"},
			&Template{
				Name: &Qualified{Scope: &Name{Name: "std"}, Name: &Name{Name: "char_traits"}},
				Args: []AST{&BuiltinType{Name: "char"}}}}},
	// std::basic_ostream<char, std::char_traits<char> >
	'o': &Template{
		Name: &Qualified{Scope: &Name{Name: "std"}, Name: &Name{Name: "basic_ostream"}},
		Args: []AST{
			&BuiltinType{Name: "char"},
			&Template{
				Name: &Qualified{Scope: &Name{Name: "std"}, Name: &Name{Name: "char_traits"}},
				Args: []AST{&BuiltinType{Name: "char"}}}}},
	// std::basic_iostream<char, std::char_traits<char> >
	'd': &Template{
		Name: &Qualified{Scope: &Name{Name: "std"}, Name: &Name{Name: "basic_iostream"}},
		Args: []AST{
			&BuiltinType{Name: "char"},
			&Template{
				Name: &Qualified{Scope: &Name{Name: "std"}, Name: &Name{Name: "char_traits"}},
				Args: []AST{&BuiltinType{Name: "char"}}}}},
}
+
// <substitution> ::= S <seq-id> _
//                ::= S_
//                ::= St
//                ::= Sa
//                ::= Sb
//                ::= Ss
//                ::= Si
//                ::= So
//                ::= Sd
func (st *state) substitution(forPrefix bool) AST {
	st.checkChar('S')
	if len(st.str) == 0 {
		st.fail("missing substitution index")
	}
	c := st.str[0]
	st.advance(1)
	// dec counts consumed characters, for failEarlier offsets.
	dec := 1
	if c == '_' || isDigit(c) || isUpper(c) {
		// S_ is index 0; S<seq-id>_ is the base-36 seq-id
		// plus one.
		id := 0
		if c != '_' {
			for c != '_' {
				// Don't overflow a 32-bit int.
				if id >= 0x80000000/36-36 {
					st.fail("substitution index overflow")
				}
				if isDigit(c) {
					id = id*36 + int(c-'0')
				} else if isUpper(c) {
					id = id*36 + int(c-'A') + 10
				} else {
					st.fail("invalid character in substitution index")
				}

				if len(st.str) == 0 {
					st.fail("missing end to substitution index")
				}
				c = st.str[0]
				st.advance(1)
				dec++
			}
			id++
		}

		if id >= len(st.subs) {
			st.failEarlier(fmt.Sprintf("substitution index out of range (%d >= %d)", id, len(st.subs)), dec)
		}

		ret := st.subs[id]

		// We need to update any references to template
		// parameters to refer to the currently active
		// template.

		// When copying a Typed we may need to adjust
		// the templates.
		copyTemplates := st.templates
		var oldInLambda []int

		// pushTemplate is called from skip, popTemplate from copy.
		pushTemplate := func(template *Template) {
			copyTemplates = append(copyTemplates, template)
			oldInLambda = append(oldInLambda, st.inLambda)
			st.inLambda = 0
		}
		popTemplate := func() {
			copyTemplates = copyTemplates[:len(copyTemplates)-1]
			st.inLambda = oldInLambda[len(oldInLambda)-1]
			oldInLambda = oldInLambda[:len(oldInLambda)-1]
		}

		copy := func(a AST) AST {
			var index int
			switch a := a.(type) {
			case *Typed:
				// Remove the template added in skip.
				if _, ok := a.Name.(*Template); ok {
					popTemplate()
				}
				return nil
			case *Closure:
				// Undo the increment in skip.
				st.inLambda--
				return nil
			case *TemplateParam:
				index = a.Index
			case *LambdaAuto:
				// A lambda auto parameter is represented
				// as a template parameter, so we may have
				// to change back when substituting.
				index = a.Index
			default:
				return nil
			}
			if st.inLambda > 0 {
				if _, ok := a.(*LambdaAuto); ok {
					return nil
				}
				return &LambdaAuto{Index: index}
			}
			var template *Template
			if len(copyTemplates) > 0 {
				template = copyTemplates[len(copyTemplates)-1]
			} else if rt, ok := ret.(*Template); ok {
				// At least with clang we can see a template
				// to start, and sometimes we need to refer
				// to it. There is probably something wrong
				// here.
				template = rt
			} else {
				st.failEarlier("substituted template parameter not in scope of template", dec)
			}
			if template == nil {
				// This template parameter is within
				// the scope of a cast operator.
				return &TemplateParam{Index: index, Template: nil}
			}

			if index >= len(template.Args) {
				st.failEarlier(fmt.Sprintf("substituted template index out of range (%d >= %d)", index, len(template.Args)), dec)
			}

			return &TemplateParam{Index: index, Template: template}
		}
		var seen []AST
		skip := func(a AST) bool {
			switch a := a.(type) {
			case *Typed:
				if template, ok := a.Name.(*Template); ok {
					// This template is removed in copy.
					pushTemplate(template)
				}
				return false
			case *Closure:
				// This is decremented in copy.
				st.inLambda++
				return false
			case *TemplateParam, *LambdaAuto:
				return false
			}
			// Don't revisit shared nodes.
			for _, v := range seen {
				if v == a {
					return true
				}
			}
			seen = append(seen, a)
			return false
		}

		// Copy returns nil when nothing needed rewriting; reuse
		// the stored candidate directly in that case.
		if c := ret.Copy(copy, skip); c != nil {
			return c
		}

		return ret
	} else {
		m := subAST
		if st.verbose {
			m = verboseAST
		}
		// For compatibility with the standard demangler, use
		// a longer name for a constructor or destructor.
		if forPrefix && len(st.str) > 0 && (st.str[0] == 'C' || st.str[0] == 'D') {
			m = verboseAST
		}
		a, ok := m[c]
		if !ok {
			st.failEarlier("unrecognized substitution code", 1)
		}

		// A following 'B' wraps the standard substitution in a
		// tagged name, which becomes a new candidate.
		if len(st.str) > 0 && st.str[0] == 'B' {
			a = st.taggedName(a)
			st.subs.add(a)
		}

		return a
	}
}
+
// isDigit returns whether c is a digit for demangling purposes.
// (Fixes the "whetner" typo in the original comment.)
func isDigit(c byte) bool {
	return c >= '0' && c <= '9'
}
+
// isUpper returns whether c is an upper case letter for demangling purposes.
func isUpper(c byte) bool {
	return 'A' <= c && c <= 'Z'
}
+
// isLower returns whether c is a lower case letter for demangling purposes.
func isLower(c byte) bool {
	return 'a' <= c && c <= 'z'
}
+
+// simplify replaces template parameters with their expansions, and
+// merges qualifiers.
+func simplify(a AST) AST {
+ var seen []AST
+ skip := func(a AST) bool {
+ for _, v := range seen {
+ if v == a {
+ return true
+ }
+ }
+ seen = append(seen, a)
+ return false
+ }
+ if r := a.Copy(simplifyOne, skip); r != nil {
+ return r
+ }
+ return a
+}
+
// simplifyOne simplifies a single AST. It returns nil if there is
// nothing to do.
func simplifyOne(a AST) AST {
	switch a := a.(type) {
	case *TemplateParam:
		// Replace a resolved template parameter with its argument.
		if a.Template != nil && a.Index < len(a.Template.Args) {
			return a.Template.Args[a.Index]
		}
	case *MethodWithQualifiers:
		if m, ok := a.Method.(*MethodWithQualifiers); ok {
			// Merge nested method qualifiers; when both levels
			// carry a ref-qualifier, "&" wins over "&&".
			ref := a.RefQualifier
			if ref == "" {
				ref = m.RefQualifier
			} else if m.RefQualifier != "" {
				if ref == "&" || m.RefQualifier == "&" {
					ref = "&"
				}
			}
			return &MethodWithQualifiers{Method: m.Method, Qualifiers: mergeQualifiers(a.Qualifiers, m.Qualifiers), RefQualifier: ref}
		}
		if t, ok := a.Method.(*TypeWithQualifiers); ok {
			return &MethodWithQualifiers{Method: t.Base, Qualifiers: mergeQualifiers(a.Qualifiers, t.Qualifiers), RefQualifier: a.RefQualifier}
		}
	case *TypeWithQualifiers:
		// A qualified function type becomes a qualified method.
		if ft, ok := a.Base.(*FunctionType); ok {
			return &MethodWithQualifiers{Method: ft, Qualifiers: a.Qualifiers, RefQualifier: ""}
		}
		// Collapse nested qualifier wrappers into one.
		if t, ok := a.Base.(*TypeWithQualifiers); ok {
			return &TypeWithQualifiers{Base: t.Base, Qualifiers: mergeQualifiers(a.Qualifiers, t.Qualifiers)}
		}
		if m, ok := a.Base.(*MethodWithQualifiers); ok {
			return &MethodWithQualifiers{Method: m.Method, Qualifiers: mergeQualifiers(a.Qualifiers, m.Qualifiers), RefQualifier: m.RefQualifier}
		}
	case *ReferenceType:
		// Collapse a reference to a reference (C++ reference
		// collapsing: & over & or && yields &).
		if rt, ok := a.Base.(*ReferenceType); ok {
			return rt
		}
		if rrt, ok := a.Base.(*RvalueReferenceType); ok {
			return &ReferenceType{Base: rrt.Base}
		}
	case *RvalueReferenceType:
		// && over && yields &&; && over & yields &.
		if rrt, ok := a.Base.(*RvalueReferenceType); ok {
			return rrt
		}
		if rt, ok := a.Base.(*ReferenceType); ok {
			return rt
		}
	case *ArrayType:
		// Qualifiers on the element of an array type
		// go on the whole array type.
		if q, ok := a.Element.(*TypeWithQualifiers); ok {
			return &TypeWithQualifiers{
				Base:       &ArrayType{Dimension: a.Dimension, Element: q.Base},
				Qualifiers: q.Qualifiers,
			}
		}
	case *PackExpansion:
		// Expand the pack and replace it with a list of
		// expressions.
		if a.Pack != nil {
			exprs := make([]AST, len(a.Pack.Args))
			for i, arg := range a.Pack.Args {
				copy := func(sub AST) AST {
					// Replace the ArgumentPack
					// with a specific argument.
					if sub == a.Pack {
						return arg
					}
					// Copy everything else.
					return nil
				}

				var seen []AST
				skip := func(sub AST) bool {
					// Don't traverse into another
					// pack expansion.
					if _, ok := sub.(*PackExpansion); ok {
						return true
					}
					for _, v := range seen {
						if v == sub {
							return true
						}
					}
					seen = append(seen, sub)
					return false
				}

				// Copy returns nil when no replacement was
				// needed; fall back to the original base.
				b := a.Base.Copy(copy, skip)
				if b == nil {
					b = a.Base
				}
				exprs[i] = simplify(b)
			}
			return &ExprList{Exprs: exprs}
		}
	}
	return nil
}
+
// findArgumentPack walks the AST looking for the argument pack for a
// pack expansion. We find it via a template parameter.
func (st *state) findArgumentPack(a AST) *ArgumentPack {
	var seen []AST
	var ret *ArgumentPack
	a.Traverse(func(a AST) bool {
		// Once a pack has been found, stop descending everywhere.
		if ret != nil {
			return false
		}
		switch a := a.(type) {
		case *TemplateParam:
			if a.Template == nil || a.Index >= len(a.Template.Args) {
				return true
			}
			if pack, ok := a.Template.Args[a.Index].(*ArgumentPack); ok {
				ret = pack
				return false
			}
		// Don't descend into nested pack expansions or into
		// node kinds that cannot contain a template parameter.
		case *PackExpansion, *Closure, *Name:
			return false
		case *TaggedName, *Operator, *BuiltinType, *FunctionParam:
			return false
		case *UnnamedType, *FixedType, *DefaultArg:
			return false
		}
		// Don't revisit shared nodes.
		for _, v := range seen {
			if v == a {
				return false
			}
		}
		seen = append(seen, a)
		return true
	})
	return ret
}