summaryrefslogtreecommitdiffstats
path: root/src/go/collectors/go.d.plugin/pkg
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-05-05 12:08:03 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-05-05 12:08:18 +0000
commit5da14042f70711ea5cf66e034699730335462f66 (patch)
tree0f6354ccac934ed87a2d555f45be4c831cf92f4a /src/go/collectors/go.d.plugin/pkg
parentReleasing debian version 1.44.3-2. (diff)
downloadnetdata-5da14042f70711ea5cf66e034699730335462f66.tar.xz
netdata-5da14042f70711ea5cf66e034699730335462f66.zip
Merging upstream version 1.45.3+dfsg.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'src/go/collectors/go.d.plugin/pkg')
-rw-r--r--src/go/collectors/go.d.plugin/pkg/README.md22
-rw-r--r--src/go/collectors/go.d.plugin/pkg/buildinfo/version.go6
-rw-r--r--src/go/collectors/go.d.plugin/pkg/iprange/README.md37
-rw-r--r--src/go/collectors/go.d.plugin/pkg/iprange/parse.go138
-rw-r--r--src/go/collectors/go.d.plugin/pkg/iprange/parse_test.go258
-rw-r--r--src/go/collectors/go.d.plugin/pkg/iprange/pool.go40
-rw-r--r--src/go/collectors/go.d.plugin/pkg/iprange/pool_test.go104
-rw-r--r--src/go/collectors/go.d.plugin/pkg/iprange/range.go100
-rw-r--r--src/go/collectors/go.d.plugin/pkg/iprange/range_test.go200
-rw-r--r--src/go/collectors/go.d.plugin/pkg/k8sclient/k8sclient.go71
-rw-r--r--src/go/collectors/go.d.plugin/pkg/logs/csv.go195
-rw-r--r--src/go/collectors/go.d.plugin/pkg/logs/csv_test.go175
-rw-r--r--src/go/collectors/go.d.plugin/pkg/logs/json.go140
-rw-r--r--src/go/collectors/go.d.plugin/pkg/logs/json_test.go224
-rw-r--r--src/go/collectors/go.d.plugin/pkg/logs/lastline.go65
-rw-r--r--src/go/collectors/go.d.plugin/pkg/logs/lastline_test.go54
-rw-r--r--src/go/collectors/go.d.plugin/pkg/logs/ltsv.go95
-rw-r--r--src/go/collectors/go.d.plugin/pkg/logs/ltsv_test.go125
-rw-r--r--src/go/collectors/go.d.plugin/pkg/logs/parser.go65
-rw-r--r--src/go/collectors/go.d.plugin/pkg/logs/parser_test.go3
-rw-r--r--src/go/collectors/go.d.plugin/pkg/logs/reader.go193
-rw-r--r--src/go/collectors/go.d.plugin/pkg/logs/reader_test.go245
-rw-r--r--src/go/collectors/go.d.plugin/pkg/logs/regexp.go76
-rw-r--r--src/go/collectors/go.d.plugin/pkg/logs/regexp_test.go131
-rw-r--r--src/go/collectors/go.d.plugin/pkg/matcher/README.md142
-rw-r--r--src/go/collectors/go.d.plugin/pkg/matcher/cache.go56
-rw-r--r--src/go/collectors/go.d.plugin/pkg/matcher/cache_test.go53
-rw-r--r--src/go/collectors/go.d.plugin/pkg/matcher/doc.go40
-rw-r--r--src/go/collectors/go.d.plugin/pkg/matcher/doc_test.go49
-rw-r--r--src/go/collectors/go.d.plugin/pkg/matcher/expr.go62
-rw-r--r--src/go/collectors/go.d.plugin/pkg/matcher/expr_test.go100
-rw-r--r--src/go/collectors/go.d.plugin/pkg/matcher/glob.go265
-rw-r--r--src/go/collectors/go.d.plugin/pkg/matcher/glob_test.go97
-rw-r--r--src/go/collectors/go.d.plugin/pkg/matcher/logical.go101
-rw-r--r--src/go/collectors/go.d.plugin/pkg/matcher/logical_test.go97
-rw-r--r--src/go/collectors/go.d.plugin/pkg/matcher/matcher.go149
-rw-r--r--src/go/collectors/go.d.plugin/pkg/matcher/matcher_test.go122
-rw-r--r--src/go/collectors/go.d.plugin/pkg/matcher/regexp.go60
-rw-r--r--src/go/collectors/go.d.plugin/pkg/matcher/regexp_test.go66
-rw-r--r--src/go/collectors/go.d.plugin/pkg/matcher/simple_patterns.go65
-rw-r--r--src/go/collectors/go.d.plugin/pkg/matcher/simple_patterns_test.go88
-rw-r--r--src/go/collectors/go.d.plugin/pkg/matcher/string.go48
-rw-r--r--src/go/collectors/go.d.plugin/pkg/matcher/string_test.go62
-rw-r--r--src/go/collectors/go.d.plugin/pkg/metrics/counter.go93
-rw-r--r--src/go/collectors/go.d.plugin/pkg/metrics/counter_test.go105
-rw-r--r--src/go/collectors/go.d.plugin/pkg/metrics/gauge.go103
-rw-r--r--src/go/collectors/go.d.plugin/pkg/metrics/gauge_test.go129
-rw-r--r--src/go/collectors/go.d.plugin/pkg/metrics/histogram.go171
-rw-r--r--src/go/collectors/go.d.plugin/pkg/metrics/histogram_test.go136
-rw-r--r--src/go/collectors/go.d.plugin/pkg/metrics/metrics.go12
-rw-r--r--src/go/collectors/go.d.plugin/pkg/metrics/summary.go125
-rw-r--r--src/go/collectors/go.d.plugin/pkg/metrics/summary_test.go78
-rw-r--r--src/go/collectors/go.d.plugin/pkg/metrics/unique_counter.go109
-rw-r--r--src/go/collectors/go.d.plugin/pkg/metrics/unique_counter_test.go145
-rw-r--r--src/go/collectors/go.d.plugin/pkg/multipath/multipath.go90
-rw-r--r--src/go/collectors/go.d.plugin/pkg/multipath/multipath_test.go60
-rw-r--r--src/go/collectors/go.d.plugin/pkg/multipath/testdata/data1/test-empty.conf0
-rw-r--r--src/go/collectors/go.d.plugin/pkg/multipath/testdata/data1/test.conf1
-rw-r--r--src/go/collectors/go.d.plugin/pkg/multipath/testdata/data2/test-empty.conf0
-rw-r--r--src/go/collectors/go.d.plugin/pkg/multipath/testdata/data2/test.conf1
-rw-r--r--src/go/collectors/go.d.plugin/pkg/prometheus/client.go155
-rw-r--r--src/go/collectors/go.d.plugin/pkg/prometheus/client_test.go137
-rw-r--r--src/go/collectors/go.d.plugin/pkg/prometheus/metric_family.go116
-rw-r--r--src/go/collectors/go.d.plugin/pkg/prometheus/metric_family_test.go356
-rw-r--r--src/go/collectors/go.d.plugin/pkg/prometheus/metric_series.go110
-rw-r--r--src/go/collectors/go.d.plugin/pkg/prometheus/metric_series_test.go140
-rw-r--r--src/go/collectors/go.d.plugin/pkg/prometheus/parse.go413
-rw-r--r--src/go/collectors/go.d.plugin/pkg/prometheus/parse_test.go1675
-rw-r--r--src/go/collectors/go.d.plugin/pkg/prometheus/selector/README.md102
-rw-r--r--src/go/collectors/go.d.plugin/pkg/prometheus/selector/expr.go62
-rw-r--r--src/go/collectors/go.d.plugin/pkg/prometheus/selector/expr_test.go231
-rw-r--r--src/go/collectors/go.d.plugin/pkg/prometheus/selector/logical.go49
-rw-r--r--src/go/collectors/go.d.plugin/pkg/prometheus/selector/logical_test.go226
-rw-r--r--src/go/collectors/go.d.plugin/pkg/prometheus/selector/parse.go97
-rw-r--r--src/go/collectors/go.d.plugin/pkg/prometheus/selector/parse_test.go117
-rw-r--r--src/go/collectors/go.d.plugin/pkg/prometheus/selector/selector.go52
-rw-r--r--src/go/collectors/go.d.plugin/pkg/prometheus/selector/selector_test.go11
-rw-r--r--src/go/collectors/go.d.plugin/pkg/prometheus/testdata/counter-meta.txt11
-rw-r--r--src/go/collectors/go.d.plugin/pkg/prometheus/testdata/counter-no-meta.txt8
-rw-r--r--src/go/collectors/go.d.plugin/pkg/prometheus/testdata/gauge-meta.txt11
-rw-r--r--src/go/collectors/go.d.plugin/pkg/prometheus/testdata/gauge-no-meta.txt8
-rw-r--r--src/go/collectors/go.d.plugin/pkg/prometheus/testdata/histogram-meta.txt43
-rw-r--r--src/go/collectors/go.d.plugin/pkg/prometheus/testdata/histogram-no-meta.txt40
-rw-r--r--src/go/collectors/go.d.plugin/pkg/prometheus/testdata/multiline-help.txt3
-rw-r--r--src/go/collectors/go.d.plugin/pkg/prometheus/testdata/summary-meta.txt43
-rw-r--r--src/go/collectors/go.d.plugin/pkg/prometheus/testdata/summary-no-meta.txt40
-rw-r--r--src/go/collectors/go.d.plugin/pkg/prometheus/testdata/testdata.nometa.txt410
-rw-r--r--src/go/collectors/go.d.plugin/pkg/prometheus/testdata/testdata.txt528
-rw-r--r--src/go/collectors/go.d.plugin/pkg/socket/client.go106
-rw-r--r--src/go/collectors/go.d.plugin/pkg/socket/client_test.go163
-rw-r--r--src/go/collectors/go.d.plugin/pkg/socket/servers_test.go139
-rw-r--r--src/go/collectors/go.d.plugin/pkg/socket/types.go41
-rw-r--r--src/go/collectors/go.d.plugin/pkg/socket/utils.go25
-rw-r--r--src/go/collectors/go.d.plugin/pkg/stm/stm.go172
-rw-r--r--src/go/collectors/go.d.plugin/pkg/stm/stm_test.go415
-rw-r--r--src/go/collectors/go.d.plugin/pkg/tlscfg/config.go77
-rw-r--r--src/go/collectors/go.d.plugin/pkg/tlscfg/config_test.go10
-rw-r--r--src/go/collectors/go.d.plugin/pkg/web/client.go80
-rw-r--r--src/go/collectors/go.d.plugin/pkg/web/client_test.go23
-rw-r--r--src/go/collectors/go.d.plugin/pkg/web/doc.go9
-rw-r--r--src/go/collectors/go.d.plugin/pkg/web/doc_test.go15
-rw-r--r--src/go/collectors/go.d.plugin/pkg/web/duration.go72
-rw-r--r--src/go/collectors/go.d.plugin/pkg/web/duration_test.go114
-rw-r--r--src/go/collectors/go.d.plugin/pkg/web/request.go92
-rw-r--r--src/go/collectors/go.d.plugin/pkg/web/request_test.go180
-rw-r--r--src/go/collectors/go.d.plugin/pkg/web/web.go11
106 files changed, 12780 insertions, 0 deletions
diff --git a/src/go/collectors/go.d.plugin/pkg/README.md b/src/go/collectors/go.d.plugin/pkg/README.md
new file mode 100644
index 000000000..64b25eea2
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/README.md
@@ -0,0 +1,22 @@
+<!--
+title: "Helper Packages"
+custom_edit_url: "https://github.com/netdata/netdata/blob/master/src/go/collectors/go.d.plugin/pkg/README.md"
+sidebar_label: "Helper Packages"
+learn_status: "Published"
+learn_rel_path: "Developers/External plugins/go.d.plugin/Helper Packages"
+-->
+
+# Helper Packages
+
- if you need to work with IP ranges, consider using
  [`iprange`](https://github.com/netdata/netdata/blob/master/src/go/collectors/go.d.plugin/pkg/iprange/README.md).
- if you parse application log files, then [`logs`](https://github.com/netdata/netdata/tree/master/src/go/collectors/go.d.plugin/pkg/logs) is
  handy.
+- if you need filtering
+ check [`matcher`](https://github.com/netdata/netdata/blob/master/src/go/collectors/go.d.plugin/pkg/matcher/README.md).
+- if you collect metrics from an HTTP endpoint use [`web`](https://github.com/netdata/netdata/tree/master/src/go/collectors/go.d.plugin/pkg/web).
- if you collect metrics from a prometheus endpoint,
  then [`prometheus`](https://github.com/netdata/netdata/tree/master/src/go/collectors/go.d.plugin/pkg/prometheus)
  and [`web`](https://github.com/netdata/netdata/blob/master/src/go/collectors/go.d.plugin/pkg/web/README.md) are what you need.
+- [`tlscfg`](https://github.com/netdata/netdata/blob/master/src/go/collectors/go.d.plugin/pkg/tlscfg/README.md) provides TLS support.
+- [`stm`](https://github.com/netdata/netdata/blob/master/src/go/collectors/go.d.plugin/pkg/stm/README.md) helps you to convert any struct to a `map[string]int64`.
diff --git a/src/go/collectors/go.d.plugin/pkg/buildinfo/version.go b/src/go/collectors/go.d.plugin/pkg/buildinfo/version.go
new file mode 100644
index 000000000..55977a592
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/buildinfo/version.go
@@ -0,0 +1,6 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package buildinfo
+
+// Version stores the agent's version number. It's set during the build process using build flags.
+var Version = "v0.0.0"
diff --git a/src/go/collectors/go.d.plugin/pkg/iprange/README.md b/src/go/collectors/go.d.plugin/pkg/iprange/README.md
new file mode 100644
index 000000000..4020bba02
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/iprange/README.md
@@ -0,0 +1,37 @@
+<!--
+title: "iprange"
+custom_edit_url: "https://github.com/netdata/netdata/blob/master/src/go/collectors/go.d.plugin/pkg/iprange/README.md"
+sidebar_label: "iprange"
+learn_status: "Published"
+learn_rel_path: "Developers/External plugins/go.d.plugin/Helper Packages"
+-->
+
+# iprange
+
+This package helps you to work with IP ranges.
+
+IP range is a set of IP addresses. Both IPv4 and IPv6 are supported.
+
+IP range interface:
+
+```
+type Range interface {
+ Family() Family
+ Contains(ip net.IP) bool
+ Size() *big.Int
+ fmt.Stringer
+}
+```
+
+## Supported formats
+
+- `IPv4 address` (192.0.2.1)
+- `IPv4 range` (192.0.2.0-192.0.2.10)
+- `IPv4 CIDR` (192.0.2.0/24)
+- `IPv4 subnet mask` (192.0.2.0/255.255.255.0)
+- `IPv6 address` (2001:db8::1)
+- `IPv6 range` (2001:db8::-2001:db8::10)
+- `IPv6 CIDR` (2001:db8::/64)
+
An IP range doesn't include the network and broadcast IP addresses when the format is `IPv4 CIDR`,
`IPv4 subnet mask`, or `IPv6 CIDR`.
diff --git a/src/go/collectors/go.d.plugin/pkg/iprange/parse.go b/src/go/collectors/go.d.plugin/pkg/iprange/parse.go
new file mode 100644
index 000000000..3471702a1
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/iprange/parse.go
@@ -0,0 +1,138 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package iprange
+
+import (
+ "bytes"
+ "fmt"
+ "net"
+ "regexp"
+ "strings"
+
+ "github.com/apparentlymart/go-cidr/cidr"
+)
+
+// ParseRanges parses s as a space separated list of IP Ranges, returning the result and an error if any.
+// IP Range can be in IPv4 address ("192.0.2.1"), IPv4 range ("192.0.2.0-192.0.2.10")
+// IPv4 CIDR ("192.0.2.0/24"), IPv4 subnet mask ("192.0.2.0/255.255.255.0"),
+// IPv6 address ("2001:db8::1"), IPv6 range ("2001:db8::-2001:db8::10"),
+// or IPv6 CIDR ("2001:db8::/64") form.
+// IPv4 CIDR, IPv4 subnet mask and IPv6 CIDR ranges don't include network and broadcast addresses.
+func ParseRanges(s string) ([]Range, error) {
+ parts := strings.Fields(s)
+ if len(parts) == 0 {
+ return nil, nil
+ }
+
+ var ranges []Range
+ for _, v := range parts {
+ r, err := ParseRange(v)
+ if err != nil {
+ return nil, err
+ }
+
+ if r != nil {
+ ranges = append(ranges, r)
+ }
+ }
+ return ranges, nil
+}
+
var (
	// reRange matches a bare address or a dash-separated address pair.
	reRange = regexp.MustCompile("^[0-9a-f.:-]+$") // addr | addr-addr
	// reCIDR matches CIDR notation (1-3 digit prefix length).
	reCIDR = regexp.MustCompile("^[0-9a-f.:]+/[0-9]{1,3}$") // addr/prefix_length
	// reSubnetMask matches IPv4 dotted-decimal mask notation; the mask
	// part is at least 7 chars ("0.0.0.0"), which disambiguates it from CIDR.
	reSubnetMask = regexp.MustCompile("^[0-9.]+/[0-9.]{7,}$") // v4_addr/mask
)
+
+// ParseRange parses s as an IP Range, returning the result and an error if any.
+// The string s can be in IPv4 address ("192.0.2.1"), IPv4 range ("192.0.2.0-192.0.2.10")
+// IPv4 CIDR ("192.0.2.0/24"), IPv4 subnet mask ("192.0.2.0/255.255.255.0"),
+// IPv6 address ("2001:db8::1"), IPv6 range ("2001:db8::-2001:db8::10"),
+// or IPv6 CIDR ("2001:db8::/64") form.
+// IPv4 CIDR, IPv4 subnet mask and IPv6 CIDR ranges don't include network and broadcast addresses.
+func ParseRange(s string) (Range, error) {
+ s = strings.ToLower(s)
+ if s == "" {
+ return nil, nil
+ }
+
+ var r Range
+ switch {
+ case reRange.MatchString(s):
+ r = parseRange(s)
+ case reCIDR.MatchString(s):
+ r = parseCIDR(s)
+ case reSubnetMask.MatchString(s):
+ r = parseSubnetMask(s)
+ }
+
+ if r == nil {
+ return nil, fmt.Errorf("ip range (%s) invalid syntax", s)
+ }
+ return r, nil
+}
+
+func parseRange(s string) Range {
+ var start, end net.IP
+ if idx := strings.IndexByte(s, '-'); idx != -1 {
+ start, end = net.ParseIP(s[:idx]), net.ParseIP(s[idx+1:])
+ } else {
+ start, end = net.ParseIP(s), net.ParseIP(s)
+ }
+
+ return New(start, end)
+}
+
// parseCIDR parses s in CIDR form ("address/prefix-length") and returns the
// corresponding range, or nil when s is not valid CIDR.
// For prefixes that have distinct network/broadcast addresses (shorter than
// /31 for IPv4, /127 for IPv6) those two addresses are excluded.
func parseCIDR(s string) Range {
	ip, network, err := net.ParseCIDR(s)
	if err != nil {
		return nil
	}

	// First and last addresses of the network.
	start, end := cidr.AddressRange(network)
	prefixLen, _ := network.Mask.Size()

	// Trim the network and broadcast addresses unless the prefix is so
	// long that every address is usable (/31, /32, /127, /128).
	if isV4IP(ip) && prefixLen < 31 || isV6IP(ip) && prefixLen < 127 {
		start = cidr.Inc(start)
		end = cidr.Dec(end)
	}

	// Reuse the range parser to build the Range value.
	return parseRange(fmt.Sprintf("%s-%s", start, end))
}
+
+func parseSubnetMask(s string) Range {
+ idx := strings.LastIndexByte(s, '/')
+ if idx == -1 {
+ return nil
+ }
+
+ address, mask := s[:idx], s[idx+1:]
+
+ ip := net.ParseIP(mask).To4()
+ if ip == nil {
+ return nil
+ }
+
+ prefixLen, bits := net.IPv4Mask(ip[0], ip[1], ip[2], ip[3]).Size()
+ if prefixLen+bits == 0 {
+ return nil
+ }
+
+ return parseCIDR(fmt.Sprintf("%s/%d", address, prefixLen))
+}
+
+func isV4RangeValid(start, end net.IP) bool {
+ return isV4IP(start) && isV4IP(end) && bytes.Compare(end, start) >= 0
+}
+
+func isV6RangeValid(start, end net.IP) bool {
+ return isV6IP(start) && isV6IP(end) && bytes.Compare(end, start) >= 0
+}
+
+func isV4IP(ip net.IP) bool {
+ return ip.To4() != nil
+}
+
+func isV6IP(ip net.IP) bool {
+ return !isV4IP(ip) && ip.To16() != nil
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/iprange/parse_test.go b/src/go/collectors/go.d.plugin/pkg/iprange/parse_test.go
new file mode 100644
index 000000000..8b4ab96b3
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/iprange/parse_test.go
@@ -0,0 +1,258 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package iprange
+
+import (
+ "fmt"
+ "net"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
// TestParseRanges verifies that a space separated list of ranges parses
// into the expected []Range, and that a single malformed element fails
// the whole parse (nil result, non-nil error).
func TestParseRanges(t *testing.T) {
	// Table keys double as subtest names.
	tests := map[string]struct {
		input      string
		wantRanges []Range
		wantErr    bool
	}{
		"single range": {
			input: "192.0.2.0-192.0.2.10",
			wantRanges: []Range{
				prepareRange("192.0.2.0", "192.0.2.10"),
			},
		},
		"multiple ranges": {
			input: "2001:db8::0 192.0.2.0-192.0.2.10 2001:db8::0/126 192.0.2.0/255.255.255.0",
			wantRanges: []Range{
				prepareRange("2001:db8::0", "2001:db8::0"),
				prepareRange("192.0.2.0", "192.0.2.10"),
				prepareRange("2001:db8::1", "2001:db8::2"),
				prepareRange("192.0.2.1", "192.0.2.254"),
			},
		},
		"single invalid syntax": {
			input:   "192.0.2.0-192.0.2.",
			wantErr: true,
		},
		"multiple invalid syntax": {
			input:   "2001:db8::0 192.0.2.0-192.0.2.10 2001:db8::0/999 192.0.2.0/255.255.255.0",
			wantErr: true,
		},
	}

	for name, test := range tests {
		t.Run(name, func(t *testing.T) {
			rs, err := ParseRanges(test.input)

			if test.wantErr {
				assert.Error(t, err)
				assert.Nilf(t, rs, "want: nil, got: %s", rs)
			} else {
				assert.NoError(t, err)
				assert.Equalf(t, test.wantRanges, rs, "want: %s, got: %s", test.wantRanges, rs)
			}
		})
	}
}
+
// TestParseRange exercises every supported syntax (plain address, dash
// range, CIDR, IPv4 subnet mask) for both families, including the
// network/broadcast exclusion rule and the /31 //127 exceptions, plus a
// broad set of malformed inputs that must produce (nil, error).
func TestParseRange(t *testing.T) {
	tests := map[string]struct {
		input     string
		wantRange Range
		wantErr   bool
	}{
		"v4 IP": {
			input:     "192.0.2.0",
			wantRange: prepareRange("192.0.2.0", "192.0.2.0"),
		},
		"v4 IP: invalid address": {
			input:   "192.0.2.",
			wantErr: true,
		},
		"v4 Range": {
			input:     "192.0.2.0-192.0.2.10",
			wantRange: prepareRange("192.0.2.0", "192.0.2.10"),
		},
		"v4 Range: start == end": {
			input:     "192.0.2.0-192.0.2.0",
			wantRange: prepareRange("192.0.2.0", "192.0.2.0"),
		},
		"v4 Range: start > end": {
			input:   "192.0.2.10-192.0.2.0",
			wantErr: true,
		},
		"v4 Range: invalid start": {
			input:   "192.0.2.-192.0.2.10",
			wantErr: true,
		},
		"v4 Range: invalid end": {
			input:   "192.0.2.0-192.0.2.",
			wantErr: true,
		},
		"v4 Range: v6 start": {
			input:   "2001:db8::0-192.0.2.10",
			wantErr: true,
		},
		"v4 Range: v6 end": {
			input:   "192.0.2.0-2001:db8::0",
			wantErr: true,
		},
		"v4 CIDR: /0": {
			input:     "192.0.2.0/0",
			wantRange: prepareRange("0.0.0.1", "255.255.255.254"),
		},
		"v4 CIDR: /24": {
			input:     "192.0.2.0/24",
			wantRange: prepareRange("192.0.2.1", "192.0.2.254"),
		},
		"v4 CIDR: /30": {
			input:     "192.0.2.0/30",
			wantRange: prepareRange("192.0.2.1", "192.0.2.2"),
		},
		// /31 and /32 keep all addresses (no network/broadcast trim).
		"v4 CIDR: /31": {
			input:     "192.0.2.0/31",
			wantRange: prepareRange("192.0.2.0", "192.0.2.1"),
		},
		"v4 CIDR: /32": {
			input:     "192.0.2.0/32",
			wantRange: prepareRange("192.0.2.0", "192.0.2.0"),
		},
		"v4 CIDR: ip instead of host address": {
			input:     "192.0.2.10/24",
			wantRange: prepareRange("192.0.2.1", "192.0.2.254"),
		},
		"v4 CIDR: missing prefix length": {
			input:   "192.0.2.0/",
			wantErr: true,
		},
		"v4 CIDR: invalid prefix length": {
			input:   "192.0.2.0/99",
			wantErr: true,
		},
		"v4 Mask: /0": {
			input:     "192.0.2.0/0.0.0.0",
			wantRange: prepareRange("0.0.0.1", "255.255.255.254"),
		},
		"v4 Mask: /24": {
			input:     "192.0.2.0/255.255.255.0",
			wantRange: prepareRange("192.0.2.1", "192.0.2.254"),
		},
		"v4 Mask: /30": {
			input:     "192.0.2.0/255.255.255.252",
			wantRange: prepareRange("192.0.2.1", "192.0.2.2"),
		},
		"v4 Mask: /31": {
			input:     "192.0.2.0/255.255.255.254",
			wantRange: prepareRange("192.0.2.0", "192.0.2.1"),
		},
		"v4 Mask: /32": {
			input:     "192.0.2.0/255.255.255.255",
			wantRange: prepareRange("192.0.2.0", "192.0.2.0"),
		},
		"v4 Mask: missing prefix mask": {
			input:   "192.0.2.0/",
			wantErr: true,
		},
		"v4 Mask: invalid mask": {
			input:   "192.0.2.0/mask",
			wantErr: true,
		},
		// Non-contiguous masks are rejected.
		"v4 Mask: not canonical form mask": {
			input:   "192.0.2.0/255.255.0.254",
			wantErr: true,
		},
		"v4 Mask: v6 address": {
			input:   "2001:db8::/255.255.255.0",
			wantErr: true,
		},

		"v6 IP": {
			input:     "2001:db8::0",
			wantRange: prepareRange("2001:db8::0", "2001:db8::0"),
		},
		"v6 IP: invalid address": {
			input:   "2001:db8",
			wantErr: true,
		},
		"v6 Range": {
			input:     "2001:db8::-2001:db8::10",
			wantRange: prepareRange("2001:db8::", "2001:db8::10"),
		},
		"v6 Range: start == end": {
			input:     "2001:db8::-2001:db8::",
			wantRange: prepareRange("2001:db8::", "2001:db8::"),
		},
		"v6 Range: start > end": {
			input:   "2001:db8::10-2001:db8::",
			wantErr: true,
		},
		"v6 Range: invalid start": {
			input:   "2001:db8-2001:db8::10",
			wantErr: true,
		},
		"v6 Range: invalid end": {
			input:   "2001:db8::-2001:db8",
			wantErr: true,
		},
		"v6 Range: v4 start": {
			input:   "192.0.2.0-2001:db8::10",
			wantErr: true,
		},
		"v6 Range: v4 end": {
			input:   "2001:db8::-192.0.2.10",
			wantErr: true,
		},
		"v6 CIDR: /0": {
			input:     "2001:db8::/0",
			wantRange: prepareRange("::1", "ffff:ffff:ffff:ffff:ffff:ffff:ffff:fffe"),
		},
		"v6 CIDR: /64": {
			input:     "2001:db8::/64",
			wantRange: prepareRange("2001:db8::1", "2001:db8::ffff:ffff:ffff:fffe"),
		},
		"v6 CIDR: /126": {
			input:     "2001:db8::/126",
			wantRange: prepareRange("2001:db8::1", "2001:db8::2"),
		},
		// /127 and /128 keep all addresses (no network/broadcast trim).
		"v6 CIDR: /127": {
			input:     "2001:db8::/127",
			wantRange: prepareRange("2001:db8::", "2001:db8::1"),
		},
		"v6 CIDR: /128": {
			input:     "2001:db8::/128",
			wantRange: prepareRange("2001:db8::", "2001:db8::"),
		},
		"v6 CIDR: ip instead of host address": {
			input:     "2001:db8::10/64",
			wantRange: prepareRange("2001:db8::1", "2001:db8::ffff:ffff:ffff:fffe"),
		},
		"v6 CIDR: missing prefix length": {
			input:   "2001:db8::/",
			wantErr: true,
		},
		"v6 CIDR: invalid prefix length": {
			input:   "2001:db8::/999",
			wantErr: true,
		},
	}

	for name, test := range tests {
		name = fmt.Sprintf("%s (%s)", name, test.input)
		t.Run(name, func(t *testing.T) {
			r, err := ParseRange(test.input)

			if test.wantErr {
				assert.Error(t, err)
				assert.Nilf(t, r, "want: nil, got: %s", r)
			} else {
				assert.NoError(t, err)
				assert.Equalf(t, test.wantRange, r, "want: %s, got: %s", test.wantRange, r)
			}
		})
	}
}
+
+func prepareRange(start, end string) Range {
+ return New(net.ParseIP(start), net.ParseIP(end))
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/iprange/pool.go b/src/go/collectors/go.d.plugin/pkg/iprange/pool.go
new file mode 100644
index 000000000..48ba5689b
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/iprange/pool.go
@@ -0,0 +1,40 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package iprange
+
+import (
+ "math/big"
+ "net"
+ "strings"
+)
+
// Pool is a collection of IP Ranges. It offers the same Contains/Size
// operations as a single Range, applied across all of its members.
type Pool []Range
+
+// String returns the string form of the pool.
+func (p Pool) String() string {
+ var b strings.Builder
+ for _, r := range p {
+ b.WriteString(r.String() + " ")
+ }
+ return strings.TrimSpace(b.String())
+}
+
+// Size reports the number of IP addresses in the pool.
+func (p Pool) Size() *big.Int {
+ size := big.NewInt(0)
+ for _, r := range p {
+ size.Add(size, r.Size())
+ }
+ return size
+}
+
+// Contains reports whether the pool includes IP.
+func (p Pool) Contains(ip net.IP) bool {
+ for _, r := range p {
+ if r.Contains(ip) {
+ return true
+ }
+ }
+ return false
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/iprange/pool_test.go b/src/go/collectors/go.d.plugin/pkg/iprange/pool_test.go
new file mode 100644
index 000000000..2864b6711
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/iprange/pool_test.go
@@ -0,0 +1,104 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package iprange
+
+import (
+ "fmt"
+ "math/big"
+ "net"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestPool_String(t *testing.T) {
+ tests := map[string]struct {
+ input string
+ wantString string
+ }{
+ "singe": {
+ input: "192.0.2.0-192.0.2.10",
+ wantString: "192.0.2.0-192.0.2.10",
+ },
+ "multiple": {
+ input: "192.0.2.0-192.0.2.10 2001:db8::-2001:db8::10",
+ wantString: "192.0.2.0-192.0.2.10 2001:db8::-2001:db8::10",
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ rs, err := ParseRanges(test.input)
+ require.NoError(t, err)
+ p := Pool(rs)
+
+ assert.Equal(t, test.wantString, p.String())
+ })
+ }
+}
+
+func TestPool_Size(t *testing.T) {
+ tests := map[string]struct {
+ input string
+ wantSize *big.Int
+ }{
+ "singe": {
+ input: "192.0.2.0-192.0.2.10",
+ wantSize: big.NewInt(11),
+ },
+ "multiple": {
+ input: "192.0.2.0-192.0.2.10 2001:db8::-2001:db8::10",
+ wantSize: big.NewInt(11 + 17),
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ rs, err := ParseRanges(test.input)
+ require.NoError(t, err)
+ p := Pool(rs)
+
+ assert.Equal(t, test.wantSize, p.Size())
+ })
+ }
+}
+
// TestPool_Contains verifies membership across a multi-range pool:
// a hit in the first range, a hit in the last range, and a miss.
func TestPool_Contains(t *testing.T) {
	tests := map[string]struct {
		input    string
		ip       string
		wantFail bool
	}{
		"inside first": {
			input: "192.0.2.0-192.0.2.10 192.0.2.20-192.0.2.30 2001:db8::-2001:db8::10",
			ip:    "192.0.2.5",
		},
		"inside last": {
			input: "192.0.2.0-192.0.2.10 192.0.2.20-192.0.2.30 2001:db8::-2001:db8::10",
			ip:    "2001:db8::5",
		},
		"outside": {
			input:    "192.0.2.0-192.0.2.10 192.0.2.20-192.0.2.30 2001:db8::-2001:db8::10",
			ip:       "192.0.2.100",
			wantFail: true,
		},
	}

	for name, test := range tests {
		name = fmt.Sprintf("%s (range: %s, ip: %s)", name, test.input, test.ip)
		t.Run(name, func(t *testing.T) {
			rs, err := ParseRanges(test.input)
			require.NoError(t, err)
			ip := net.ParseIP(test.ip)
			require.NotNil(t, ip)
			p := Pool(rs)

			if test.wantFail {
				assert.False(t, p.Contains(ip))
			} else {
				assert.True(t, p.Contains(ip))
			}
		})
	}
}
diff --git a/src/go/collectors/go.d.plugin/pkg/iprange/range.go b/src/go/collectors/go.d.plugin/pkg/iprange/range.go
new file mode 100644
index 000000000..1fe02eace
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/iprange/range.go
@@ -0,0 +1,100 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package iprange
+
+import (
+ "bytes"
+ "fmt"
+ "math/big"
+ "net"
+)
+
// Family represents IP Range address-family.
type Family uint8

const (
	// V4Family is IPv4 address-family. It is the zero value of Family.
	V4Family Family = iota
	// V6Family is IPv6 address-family.
	V6Family
)
+
// Range represents an IP range: a contiguous span of addresses of one family.
type Range interface {
	// Family returns the address family of the range (V4Family or V6Family).
	Family() Family
	// Contains reports whether the range includes ip.
	Contains(ip net.IP) bool
	// Size reports the number of IP addresses in the range.
	Size() *big.Int
	fmt.Stringer
}
+
+// New returns new IP Range.
+// If it is not a valid range (start and end IPs have different address-families, or start > end),
+// New returns nil.
+func New(start, end net.IP) Range {
+ if isV4RangeValid(start, end) {
+ return v4Range{start: start, end: end}
+ }
+ if isV6RangeValid(start, end) {
+ return v6Range{start: start, end: end}
+ }
+ return nil
+}
+
+type v4Range struct {
+ start net.IP
+ end net.IP
+}
+
+// String returns the string form of the range.
+func (r v4Range) String() string {
+ return fmt.Sprintf("%s-%s", r.start, r.end)
+}
+
+// Family returns the range address family.
+func (r v4Range) Family() Family {
+ return V4Family
+}
+
+// Contains reports whether the range includes IP.
+func (r v4Range) Contains(ip net.IP) bool {
+ return bytes.Compare(ip, r.start) >= 0 && bytes.Compare(ip, r.end) <= 0
+}
+
+// Size reports the number of IP addresses in the range.
+func (r v4Range) Size() *big.Int {
+ return big.NewInt(v4ToInt(r.end) - v4ToInt(r.start) + 1)
+}
+
+type v6Range struct {
+ start net.IP
+ end net.IP
+}
+
+// String returns the string form of the range.
+func (r v6Range) String() string {
+ return fmt.Sprintf("%s-%s", r.start, r.end)
+}
+
+// Family returns the range address family.
+func (r v6Range) Family() Family {
+ return V6Family
+}
+
+// Contains reports whether the range includes IP.
+func (r v6Range) Contains(ip net.IP) bool {
+ return bytes.Compare(ip, r.start) >= 0 && bytes.Compare(ip, r.end) <= 0
+}
+
+// Size reports the number of IP addresses in the range.
+func (r v6Range) Size() *big.Int {
+ size := big.NewInt(0)
+ size.Add(size, big.NewInt(0).SetBytes(r.end))
+ size.Sub(size, big.NewInt(0).SetBytes(r.start))
+ size.Add(size, big.NewInt(1))
+ return size
+}
+
+func v4ToInt(ip net.IP) int64 {
+ ip = ip.To4()
+ return int64(ip[0])<<24 | int64(ip[1])<<16 | int64(ip[2])<<8 | int64(ip[3])
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/iprange/range_test.go b/src/go/collectors/go.d.plugin/pkg/iprange/range_test.go
new file mode 100644
index 000000000..631d012e0
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/iprange/range_test.go
@@ -0,0 +1,200 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package iprange
+
+import (
+ "fmt"
+ "math/big"
+ "net"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
// TestV4Range_String verifies the "start-end" rendering for every IPv4
// input syntax (note CIDR/Mask exclude network and broadcast addresses).
func TestV4Range_String(t *testing.T) {
	tests := map[string]struct {
		input      string
		wantString string
	}{
		"IP":    {input: "192.0.2.0", wantString: "192.0.2.0-192.0.2.0"},
		"Range": {input: "192.0.2.0-192.0.2.10", wantString: "192.0.2.0-192.0.2.10"},
		"CIDR":  {input: "192.0.2.0/24", wantString: "192.0.2.1-192.0.2.254"},
		"Mask":  {input: "192.0.2.0/255.255.255.0", wantString: "192.0.2.1-192.0.2.254"},
	}

	for name, test := range tests {
		t.Run(name, func(t *testing.T) {
			r, err := ParseRange(test.input)
			require.NoError(t, err)

			assert.Equal(t, test.wantString, r.String())
		})
	}
}
+
// TestV4Range_Family verifies that every IPv4 input syntax produces a
// range reporting V4Family.
func TestV4Range_Family(t *testing.T) {
	tests := map[string]struct {
		input string
	}{
		"IP":    {input: "192.0.2.0"},
		"Range": {input: "192.0.2.0-192.0.2.10"},
		"CIDR":  {input: "192.0.2.0/24"},
		"Mask":  {input: "192.0.2.0/255.255.255.0"},
	}

	for name, test := range tests {
		t.Run(name, func(t *testing.T) {
			r, err := ParseRange(test.input)
			require.NoError(t, err)

			assert.Equal(t, V4Family, r.Family())
		})
	}
}
+
// TestV4Range_Size verifies address counts for IPv4 ranges, including
// the /31 and /32 cases where no network/broadcast trimming happens.
func TestV4Range_Size(t *testing.T) {
	tests := map[string]struct {
		input    string
		wantSize *big.Int
	}{
		"IP":      {input: "192.0.2.0", wantSize: big.NewInt(1)},
		"Range":   {input: "192.0.2.0-192.0.2.10", wantSize: big.NewInt(11)},
		"CIDR":    {input: "192.0.2.0/24", wantSize: big.NewInt(254)},
		"CIDR 31": {input: "192.0.2.0/31", wantSize: big.NewInt(2)},
		"CIDR 32": {input: "192.0.2.0/32", wantSize: big.NewInt(1)},
		"Mask":    {input: "192.0.2.0/255.255.255.0", wantSize: big.NewInt(254)},
		"Mask 31": {input: "192.0.2.0/255.255.255.254", wantSize: big.NewInt(2)},
		"Mask 32": {input: "192.0.2.0/255.255.255.255", wantSize: big.NewInt(1)},
	}

	for name, test := range tests {
		t.Run(name, func(t *testing.T) {
			r, err := ParseRange(test.input)
			require.NoError(t, err)

			assert.Equal(t, test.wantSize, r.Size())
		})
	}
}
+
// TestV4Range_Contains verifies inclusive-boundary membership for an
// IPv4 range, and that an IPv6 address is never contained.
func TestV4Range_Contains(t *testing.T) {
	tests := map[string]struct {
		input    string
		ip       string
		wantFail bool
	}{
		"inside":   {input: "192.0.2.0-192.0.2.10", ip: "192.0.2.5"},
		"outside":  {input: "192.0.2.0-192.0.2.10", ip: "192.0.2.55", wantFail: true},
		"eq start": {input: "192.0.2.0-192.0.2.10", ip: "192.0.2.0"},
		"eq end":   {input: "192.0.2.0-192.0.2.10", ip: "192.0.2.10"},
		"v6":       {input: "192.0.2.0-192.0.2.10", ip: "2001:db8::", wantFail: true},
	}

	for name, test := range tests {
		name = fmt.Sprintf("%s (range: %s, ip: %s)", name, test.input, test.ip)
		t.Run(name, func(t *testing.T) {
			r, err := ParseRange(test.input)
			require.NoError(t, err)
			ip := net.ParseIP(test.ip)
			require.NotNil(t, ip)

			if test.wantFail {
				assert.False(t, r.Contains(ip))
			} else {
				assert.True(t, r.Contains(ip))
			}
		})
	}
}
+
// TestV6Range_String verifies the "start-end" rendering for every IPv6
// input syntax (CIDR excludes the network and broadcast addresses).
func TestV6Range_String(t *testing.T) {
	tests := map[string]struct {
		input      string
		wantString string
	}{
		"IP":    {input: "2001:db8::", wantString: "2001:db8::-2001:db8::"},
		"Range": {input: "2001:db8::-2001:db8::10", wantString: "2001:db8::-2001:db8::10"},
		"CIDR":  {input: "2001:db8::/126", wantString: "2001:db8::1-2001:db8::2"},
	}

	for name, test := range tests {
		t.Run(name, func(t *testing.T) {
			r, err := ParseRange(test.input)
			require.NoError(t, err)

			assert.Equal(t, test.wantString, r.String())
		})
	}
}
+
// TestV6Range_Family verifies that every IPv6 input syntax produces a
// range reporting V6Family.
func TestV6Range_Family(t *testing.T) {
	tests := map[string]struct {
		input string
	}{
		"IP":    {input: "2001:db8::"},
		"Range": {input: "2001:db8::-2001:db8::10"},
		"CIDR":  {input: "2001:db8::/126"},
	}

	for name, test := range tests {
		t.Run(name, func(t *testing.T) {
			r, err := ParseRange(test.input)
			require.NoError(t, err)

			assert.Equal(t, V6Family, r.Family())
		})
	}
}
+
// TestV6Range_Size verifies address counts for IPv6 ranges, including
// the /127 and /128 cases where no network/broadcast trimming happens.
func TestV6Range_Size(t *testing.T) {
	tests := map[string]struct {
		input    string
		wantSize *big.Int
	}{
		"IP":       {input: "2001:db8::", wantSize: big.NewInt(1)},
		"Range":    {input: "2001:db8::-2001:db8::10", wantSize: big.NewInt(17)},
		"CIDR":     {input: "2001:db8::/120", wantSize: big.NewInt(254)},
		"CIDR 127": {input: "2001:db8::/127", wantSize: big.NewInt(2)},
		"CIDR 128": {input: "2001:db8::/128", wantSize: big.NewInt(1)},
	}

	for name, test := range tests {
		t.Run(name, func(t *testing.T) {
			r, err := ParseRange(test.input)
			require.NoError(t, err)

			assert.Equal(t, test.wantSize, r.Size())
		})
	}
}
+
// TestV6Range_Contains verifies inclusive-boundary membership for an
// IPv6 range, and that an IPv4 address is never contained.
func TestV6Range_Contains(t *testing.T) {
	tests := map[string]struct {
		input    string
		ip       string
		wantFail bool
	}{
		"inside":   {input: "2001:db8::-2001:db8::10", ip: "2001:db8::5"},
		"outside":  {input: "2001:db8::-2001:db8::10", ip: "2001:db8::ff", wantFail: true},
		"eq start": {input: "2001:db8::-2001:db8::10", ip: "2001:db8::"},
		"eq end":   {input: "2001:db8::-2001:db8::10", ip: "2001:db8::10"},
		"v4":       {input: "2001:db8::-2001:db8::10", ip: "192.0.2.0", wantFail: true},
	}

	for name, test := range tests {
		name = fmt.Sprintf("%s (range: %s, ip: %s)", name, test.input, test.ip)
		t.Run(name, func(t *testing.T) {
			r, err := ParseRange(test.input)
			require.NoError(t, err)
			ip := net.ParseIP(test.ip)
			require.NotNil(t, ip)

			if test.wantFail {
				assert.False(t, r.Contains(ip))
			} else {
				assert.True(t, r.Contains(ip))
			}
		})
	}
}
diff --git a/src/go/collectors/go.d.plugin/pkg/k8sclient/k8sclient.go b/src/go/collectors/go.d.plugin/pkg/k8sclient/k8sclient.go
new file mode 100644
index 000000000..079239c1c
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/k8sclient/k8sclient.go
@@ -0,0 +1,71 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package k8sclient
+
+import (
+ "errors"
+ "os"
+ "path/filepath"
+
+ "k8s.io/client-go/kubernetes"
+ "k8s.io/client-go/kubernetes/fake"
+ "k8s.io/client-go/rest"
+ "k8s.io/client-go/tools/clientcmd"
+
+ _ "k8s.io/client-go/plugin/pkg/client/auth/gcp"
+)
+
const (
	// EnvFakeClient, when set to any non-empty value, makes New return an
	// in-memory fake clientset instead of a real one (used in tests).
	EnvFakeClient = "KUBERNETES_FAKE_CLIENTSET"
	// defaultUserAgent is used when the caller passes an empty user agent.
	defaultUserAgent = "Netdata/k8s-client"
)
+
+func New(userAgent string) (kubernetes.Interface, error) {
+ if userAgent == "" {
+ userAgent = defaultUserAgent
+ }
+
+ switch {
+ case os.Getenv(EnvFakeClient) != "":
+ return fake.NewSimpleClientset(), nil
+ case os.Getenv("KUBERNETES_SERVICE_HOST") != "" && os.Getenv("KUBERNETES_SERVICE_PORT") != "":
+ return newInCluster(userAgent)
+ default:
+ return newOutOfCluster(userAgent)
+ }
+}
+
+func newInCluster(userAgent string) (*kubernetes.Clientset, error) {
+ config, err := rest.InClusterConfig()
+ if err != nil {
+ return nil, err
+ }
+
+ config.UserAgent = userAgent
+
+ return kubernetes.NewForConfig(config)
+}
+
+func newOutOfCluster(userAgent string) (*kubernetes.Clientset, error) {
+ home := homeDir()
+ if home == "" {
+ return nil, errors.New("couldn't find home directory")
+ }
+
+ path := filepath.Join(home, ".kube", "config")
+ config, err := clientcmd.BuildConfigFromFlags("", path)
+ if err != nil {
+ return nil, err
+ }
+
+ config.UserAgent = userAgent
+
+ return kubernetes.NewForConfig(config)
+}
+
// homeDir returns the user's home directory: $HOME when set, otherwise
// %USERPROFILE% (windows). Empty string when neither is set.
func homeDir() string {
	for _, env := range []string{"HOME", "USERPROFILE"} {
		if h := os.Getenv(env); h != "" {
			return h
		}
	}
	return ""
}
diff --git a/src/go/collectors/go.d.plugin/pkg/logs/csv.go b/src/go/collectors/go.d.plugin/pkg/logs/csv.go
new file mode 100644
index 000000000..0b7d90009
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/logs/csv.go
@@ -0,0 +1,195 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package logs
+
+import (
+ "bytes"
+ "encoding/csv"
+ "errors"
+ "fmt"
+ "io"
+ "strconv"
+ "strings"
+)
+
type (
	// CSVConfig is the configuration of a CSVParser.
	CSVConfig struct {
		// FieldsPerRecord is passed through to csv.Reader.FieldsPerRecord.
		FieldsPerRecord int `yaml:"fields_per_record" json:"fields_per_record"`
		// Delimiter is a single character or its decimal code (see parseCSVDelimiter).
		Delimiter        string `yaml:"delimiter" json:"delimiter"`
		TrimLeadingSpace bool   `yaml:"trim_leading_space" json:"trim_leading_space"`
		// Format is itself a CSV row of field names, e.g. "$host $status".
		Format string `yaml:"format" json:"format"`
		// CheckField validates/renames a format field; defaults to checkCSVFormatField.
		CheckField func(string) (string, int, bool) `yaml:"-" json:"-"`
	}

	// CSVParser parses CSV-encoded log lines.
	CSVParser struct {
		Config CSVConfig
		reader *csv.Reader
		format *csvFormat
	}

	// csvFormat maps record indexes to log line field names.
	csvFormat struct {
		raw      string     // the original Format string, for Info()
		maxIndex int        // highest referenced record index
		fields   []csvField // fields to assign, in format order
	}

	// csvField is a named field and the record index it is read from.
	csvField struct {
		name string
		idx  int
	}
)
+
+func NewCSVParser(config CSVConfig, in io.Reader) (*CSVParser, error) {
+ if config.Format == "" {
+ return nil, errors.New("empty csv format")
+ }
+
+ format, err := newCSVFormat(config)
+ if err != nil {
+ return nil, fmt.Errorf("bad csv format '%s': %v", config.Format, err)
+ }
+
+ p := &CSVParser{
+ Config: config,
+ reader: newCSVReader(in, config),
+ format: format,
+ }
+ return p, nil
+}
+
+func (p *CSVParser) ReadLine(line LogLine) error {
+ record, err := p.reader.Read()
+ if err != nil {
+ return handleCSVReaderError(err)
+ }
+ return p.format.parse(record, line)
+}
+
+func (p *CSVParser) Parse(row []byte, line LogLine) error {
+ r := newCSVReader(bytes.NewBuffer(row), p.Config)
+ record, err := r.Read()
+ if err != nil {
+ return handleCSVReaderError(err)
+ }
+ return p.format.parse(record, line)
+}
+
+func (p CSVParser) Info() string {
+ return fmt.Sprintf("csv: %s", p.format.raw)
+}
+
+func (f *csvFormat) parse(record []string, line LogLine) error {
+ if len(record) <= f.maxIndex {
+ return &ParseError{msg: "csv parse: unmatched line"}
+ }
+
+ for _, v := range f.fields {
+ if err := line.Assign(v.name, record[v.idx]); err != nil {
+ return &ParseError{msg: fmt.Sprintf("csv parse: %v", err), err: err}
+ }
+ }
+ return nil
+}
+
+func newCSVReader(in io.Reader, config CSVConfig) *csv.Reader {
+ r := csv.NewReader(in)
+ if config.Delimiter != "" {
+ if d, err := parseCSVDelimiter(config.Delimiter); err == nil {
+ r.Comma = d
+ }
+ }
+ r.TrimLeadingSpace = config.TrimLeadingSpace
+ r.FieldsPerRecord = config.FieldsPerRecord
+ r.ReuseRecord = true
+ return r
+}
+
+func newCSVFormat(config CSVConfig) (*csvFormat, error) {
+ r := csv.NewReader(strings.NewReader(config.Format))
+ if config.Delimiter != "" {
+ if d, err := parseCSVDelimiter(config.Delimiter); err == nil {
+ r.Comma = d
+ }
+ }
+ r.TrimLeadingSpace = config.TrimLeadingSpace
+
+ record, err := r.Read()
+ if err != nil {
+ return nil, err
+ }
+
+ fields, err := createCSVFields(record, config.CheckField)
+ if err != nil {
+ return nil, err
+ }
+
+ if len(fields) == 0 {
+ return nil, errors.New("zero fields")
+ }
+
+ format := &csvFormat{
+ raw: config.Format,
+ maxIndex: fields[len(fields)-1].idx,
+ fields: fields,
+ }
+ return format, nil
+}
+
+func createCSVFields(format []string, check func(string) (string, int, bool)) ([]csvField, error) {
+ if check == nil {
+ check = checkCSVFormatField
+ }
+ var fields []csvField
+ var offset int
+ seen := make(map[string]bool)
+
+ for i, name := range format {
+ name = strings.Trim(name, `"`)
+
+ name, addOffset, valid := check(name)
+ offset += addOffset
+ if !valid {
+ continue
+ }
+ if seen[name] {
+ return nil, fmt.Errorf("duplicate field: %s", name)
+ }
+ seen[name] = true
+
+ idx := i + offset
+ fields = append(fields, csvField{name, idx})
+ }
+ return fields, nil
+}
+
+func handleCSVReaderError(err error) error {
+ if isCSVParseError(err) {
+ return &ParseError{msg: fmt.Sprintf("csv parse: %v", err), err: err}
+ }
+ return err
+}
+
// isCSVParseError reports whether err is one of the csv package's
// syntax errors (as opposed to an I/O error such as io.EOF).
func isCSVParseError(err error) bool {
	for _, e := range []error{csv.ErrBareQuote, csv.ErrFieldCount, csv.ErrQuote} {
		if errors.Is(err, e) {
			return true
		}
	}
	return false
}
+
// checkCSVFormatField is the default format-field validator: a field is
// valid when it starts with '$' and has at least one character after it.
func checkCSVFormatField(name string) (newName string, offset int, valid bool) {
	if strings.HasPrefix(name, "$") && len(name) > 1 {
		return name, 0, true
	}
	return "", 0, false
}
+
+func parseCSVDelimiter(s string) (rune, error) {
+ if isNumber(s) {
+ d, err := strconv.ParseInt(s, 10, 32)
+ if err != nil {
+ return 0, fmt.Errorf("invalid CSV delimiter: %v", err)
+ }
+ return rune(d), nil
+ }
+ if len(s) != 1 {
+ return 0, errors.New("invalid CSV delimiter: must be a single character")
+ }
+ return rune(s[0]), nil
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/logs/csv_test.go b/src/go/collectors/go.d.plugin/pkg/logs/csv_test.go
new file mode 100644
index 000000000..d7baaa1b5
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/logs/csv_test.go
@@ -0,0 +1,175 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package logs
+
+import (
+ "strings"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
// testCSVConfig is the base parser config shared by the tests below;
// individual tests override Format as needed.
var testCSVConfig = CSVConfig{
	Delimiter: " ",
	Format:    "$A %B",
}
+
+func TestNewCSVParser(t *testing.T) {
+ tests := []struct {
+ name string
+ format string
+ wantErr bool
+ }{
+ {name: "valid format", format: "$A $B"},
+ {name: "empty format", wantErr: true},
+ {name: "bad format: csv read error", format: "$A $B \"$C", wantErr: true},
+ {name: "bad format: duplicate fields", format: "$A $A", wantErr: true},
+ {name: "bad format: zero fields", format: "!A !B", wantErr: true},
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ c := testCSVConfig
+ c.Format = tt.format
+ p, err := NewCSVParser(c, nil)
+ if tt.wantErr {
+ assert.Error(t, err)
+ assert.Nil(t, p)
+ } else {
+ assert.NoError(t, err)
+ assert.NotNil(t, p)
+ }
+ })
+ }
+}
+
// TestNewCSVFormat verifies field/index mapping, including $OFFSET entries
// that shift subsequent indexes (see testCheckCSVFormatField).
func TestNewCSVFormat(t *testing.T) {
	tests := []struct {
		format     string
		wantFormat csvFormat
		wantErr    bool
	}{
		{format: "$A $B", wantFormat: csvFormat{maxIndex: 1, fields: []csvField{{"$A", 0}, {"$B", 1}}}},
		{format: "$A $B !C $E", wantFormat: csvFormat{maxIndex: 3, fields: []csvField{{"$A", 0}, {"$B", 1}, {"$E", 3}}}},
		{format: "!A !B !C $E", wantFormat: csvFormat{maxIndex: 3, fields: []csvField{{"$E", 3}}}},
		{format: "$A $OFFSET $B", wantFormat: csvFormat{maxIndex: 3, fields: []csvField{{"$A", 0}, {"$B", 3}}}},
		{format: "$A $OFFSET $B $OFFSET !A", wantFormat: csvFormat{maxIndex: 3, fields: []csvField{{"$A", 0}, {"$B", 3}}}},
		{format: "$A $OFFSET $OFFSET $B", wantFormat: csvFormat{maxIndex: 5, fields: []csvField{{"$A", 0}, {"$B", 5}}}},
		{format: "$OFFSET $A $OFFSET $B", wantFormat: csvFormat{maxIndex: 5, fields: []csvField{{"$A", 2}, {"$B", 5}}}},
		{format: "$A \"$A", wantErr: true},
		{format: "$A $A", wantErr: true},
		{format: "!A !A", wantErr: true},
		{format: "", wantErr: true},
	}

	for _, tt := range tests {
		t.Run(tt.format, func(t *testing.T) {
			c := testCSVConfig
			c.Format = tt.format
			c.CheckField = testCheckCSVFormatField
			tt.wantFormat.raw = tt.format

			f, err := newCSVFormat(c)

			if tt.wantErr {
				assert.Error(t, err)
				assert.Nil(t, f)
			} else {
				assert.NoError(t, err)
				assert.Equal(t, tt.wantFormat, *f)
			}
		})
	}
}
+
// TestCSVParser_ReadLine verifies ReadLine, distinguishing parse errors
// (IsParseError) from plain read errors such as EOF.
func TestCSVParser_ReadLine(t *testing.T) {
	tests := []struct {
		name         string
		row          string
		format       string
		wantErr      bool
		wantParseErr bool
	}{
		{name: "match and no error", row: "1 2 3", format: `$A $B $C`},
		{name: "match but error on assigning", row: "1 2 3", format: `$A $B $ERR`, wantErr: true, wantParseErr: true},
		{name: "not match", row: "1 2 3", format: `$A $B $C $d`, wantErr: true, wantParseErr: true},
		{name: "error on reading csv.Err", row: "1 2\"3", format: `$A $B $C`, wantErr: true, wantParseErr: true},
		{name: "error on reading EOF", row: "", format: `$A $B $C`, wantErr: true},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			var line logLine
			r := strings.NewReader(tt.row)
			c := testCSVConfig
			c.Format = tt.format
			p, err := NewCSVParser(c, r)
			require.NoError(t, err)

			err = p.ReadLine(&line)

			if tt.wantErr {
				require.Error(t, err)
				if tt.wantParseErr {
					assert.True(t, IsParseError(err))
				} else {
					assert.False(t, IsParseError(err))
				}
			} else {
				assert.NoError(t, err)
			}
		})
	}
}
+
+func TestCSVParser_Parse(t *testing.T) {
+ tests := []struct {
+ name string
+ row string
+ format string
+ wantErr bool
+ }{
+ {name: "match and no error", row: "1 2 3", format: `$A $B $C`},
+ {name: "match but error on assigning", row: "1 2 3", format: `$A $B $ERR`, wantErr: true},
+ {name: "not match", row: "1 2 3", format: `$A $B $C $d`, wantErr: true},
+ {name: "error on reading csv.Err", row: "1 2\"3", format: `$A $B $C`, wantErr: true},
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ var line logLine
+ r := strings.NewReader(tt.row)
+ c := testCSVConfig
+ c.Format = tt.format
+ p, err := NewCSVParser(c, r)
+ require.NoError(t, err)
+
+ err = p.ReadLine(&line)
+
+ if tt.wantErr {
+ require.Error(t, err)
+ assert.True(t, IsParseError(err))
+ } else {
+ assert.NoError(t, err)
+ }
+ })
+ }
+
+}
+
+func TestCSVParser_Info(t *testing.T) {
+ p, err := NewCSVParser(testCSVConfig, nil)
+ require.NoError(t, err)
+ assert.NotZero(t, p.Info())
+}
+
// testCheckCSVFormatField behaves like checkCSVFormatField but additionally
// treats "$OFFSET" as an invalid field that shifts following indexes by one.
func testCheckCSVFormatField(name string) (newName string, offset int, valid bool) {
	switch {
	case name == "$OFFSET":
		return "", 1, false
	case len(name) > 1 && strings.HasPrefix(name, "$"):
		return name, 0, true
	default:
		return "", 0, false
	}
}
diff --git a/src/go/collectors/go.d.plugin/pkg/logs/json.go b/src/go/collectors/go.d.plugin/pkg/logs/json.go
new file mode 100644
index 000000000..ceb32e272
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/logs/json.go
@@ -0,0 +1,140 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package logs
+
+import (
+ "bufio"
+ "fmt"
+ "io"
+ "strconv"
+
+ "github.com/valyala/fastjson"
+)
+
// JSONConfig is the configuration of a JSONParser.
type JSONConfig struct {
	// Mapping renames dot-flattened JSON keys before assignment.
	Mapping map[string]string `yaml:"mapping" json:"mapping"`
}

// JSONParser parses JSON-encoded log lines.
type JSONParser struct {
	reader  *bufio.Reader
	parser  fastjson.Parser
	buf     []byte            // scratch buffer reused across values to avoid allocations
	mapping map[string]string // key renames from JSONConfig.Mapping
}
+
+func NewJSONParser(config JSONConfig, in io.Reader) (*JSONParser, error) {
+ parser := &JSONParser{
+ reader: bufio.NewReader(in),
+ mapping: config.Mapping,
+ buf: make([]byte, 0, 100),
+ }
+ return parser, nil
+}
+
+func (p *JSONParser) ReadLine(line LogLine) error {
+ row, err := p.reader.ReadSlice('\n')
+ if err != nil && len(row) == 0 {
+ return err
+ }
+ if len(row) > 0 && row[len(row)-1] == '\n' {
+ row = row[:len(row)-1]
+ }
+ return p.Parse(row, line)
+}
+
+func (p *JSONParser) Parse(row []byte, line LogLine) error {
+ val, err := p.parser.ParseBytes(row)
+ if err != nil {
+ return err
+ }
+
+ if err := p.parseObject("", val, line); err != nil {
+ return &ParseError{msg: fmt.Sprintf("json parse: %v", err), err: err}
+ }
+
+ return nil
+}
+
// parseObject walks a JSON object, assigning each scalar member to line and
// recursing into nested arrays/objects. Keys are flattened into dot-separated
// paths rooted at prefix (e.g. {"a":{"b":1}} yields "a.b"). The first error
// stops further assignments and is returned.
func (p *JSONParser) parseObject(prefix string, val *fastjson.Value, line LogLine) error {
	obj, err := val.Object()
	if err != nil {
		return err
	}

	obj.Visit(func(key []byte, v *fastjson.Value) {
		// Visit has no early-exit mechanism: once err is set, skip the
		// remaining members instead of overwriting it.
		if err != nil {
			return
		}

		k := jsonObjKey(prefix, string(key))

		switch v.Type() {
		case fastjson.TypeString, fastjson.TypeNumber:
			err = p.parseStringNumber(k, v, line)
		case fastjson.TypeArray:
			err = p.parseArray(k, v, line)
		case fastjson.TypeObject:
			err = p.parseObject(k, v, line)
		default:
			// null/true/false are ignored.
			return
		}
	})

	return err
}
+
// jsonObjKey joins a parent path and a member key with a dot; an empty
// prefix yields the key unchanged.
func jsonObjKey(prefix, key string) string {
	if prefix != "" {
		key = prefix + "." + key
	}
	return key
}
+
+func (p *JSONParser) parseArray(key string, val *fastjson.Value, line LogLine) error {
+ arr, err := val.Array()
+ if err != nil {
+ return err
+ }
+
+ for i, v := range arr {
+ k := jsonObjKey(key, strconv.Itoa(i))
+
+ switch v.Type() {
+ case fastjson.TypeString, fastjson.TypeNumber:
+ err = p.parseStringNumber(k, v, line)
+ case fastjson.TypeArray:
+ err = p.parseArray(k, v, line)
+ case fastjson.TypeObject:
+ err = p.parseObject(k, v, line)
+ default:
+ continue
+ }
+
+ if err != nil {
+ return err
+ }
+ }
+
+ return err
+}
+
// parseStringNumber assigns a scalar (string or number) JSON value to line,
// applying the configured key mapping first.
func (p *JSONParser) parseStringNumber(key string, val *fastjson.Value, line LogLine) error {
	if mapped, ok := p.mapping[key]; ok {
		key = mapped
	}

	// Reuse the parser's scratch buffer to avoid an allocation per value.
	p.buf = p.buf[:0]
	if p.buf = val.MarshalTo(p.buf); len(p.buf) == 0 {
		return nil
	}

	if val.Type() == fastjson.TypeString {
		// trim " (MarshalTo renders string values quoted)
		return line.Assign(key, string(p.buf[1:len(p.buf)-1]))
	}
	return line.Assign(key, string(p.buf))
}
+
+func (p *JSONParser) Info() string {
+ return fmt.Sprintf("json: %q", p.mapping)
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/logs/json_test.go b/src/go/collectors/go.d.plugin/pkg/logs/json_test.go
new file mode 100644
index 000000000..b82850031
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/logs/json_test.go
@@ -0,0 +1,224 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package logs
+
+import (
+ "strings"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestNewJSONParser(t *testing.T) {
+ tests := map[string]struct {
+ config JSONConfig
+ wantErr bool
+ }{
+ "empty config": {
+ config: JSONConfig{},
+ wantErr: false,
+ },
+ "with mappings": {
+ config: JSONConfig{Mapping: map[string]string{"from_field_1": "to_field_1"}},
+ wantErr: false,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ p, err := NewJSONParser(test.config, nil)
+
+ if test.wantErr {
+ assert.Error(t, err)
+ assert.Nil(t, p)
+ } else {
+ assert.NoError(t, err)
+ assert.NotNil(t, p)
+ }
+ })
+ }
+}
+
// TestJSONParser_ReadLine verifies ReadLine end-to-end: scalar types,
// key mapping, nested objects/arrays (dot-flattened keys), and error cases.
func TestJSONParser_ReadLine(t *testing.T) {
	tests := map[string]struct {
		config       JSONConfig
		input        string
		wantAssigned map[string]string
		wantErr      bool
	}{
		"string value": {
			input:   `{ "string": "example.com" }`,
			wantErr: false,
			wantAssigned: map[string]string{
				"string": "example.com",
			},
		},
		"int value": {
			input:   `{ "int": 1 }`,
			wantErr: false,
			wantAssigned: map[string]string{
				"int": "1",
			},
		},
		"float value": {
			input:   `{ "float": 1.1 }`,
			wantErr: false,
			wantAssigned: map[string]string{
				"float": "1.1",
			},
		},
		"string, int, float values": {
			input:   `{ "string": "example.com", "int": 1, "float": 1.1 }`,
			wantErr: false,
			wantAssigned: map[string]string{
				"string": "example.com",
				"int":    "1",
				"float":  "1.1",
			},
		},
		"string, int, float values with mappings": {
			config: JSONConfig{Mapping: map[string]string{
				"string": "STRING",
				"int":    "INT",
				"float":  "FLOAT",
			}},
			input:   `{ "string": "example.com", "int": 1, "float": 1.1 }`,
			wantErr: false,
			wantAssigned: map[string]string{
				"STRING": "example.com",
				"INT":    "1",
				"FLOAT":  "1.1",
			},
		},
		"nested": {
			input: `{"one":{"two":2,"three":{"four":4}},"five":5}`,
			config: JSONConfig{Mapping: map[string]string{
				"one.two": "mapped_value",
			}},
			wantErr: false,
			wantAssigned: map[string]string{
				"mapped_value":   "2",
				"one.three.four": "4",
				"five":           "5",
			},
		},
		"nested with array": {
			input: `{"one":{"two":[2,22]},"five":5}`,
			config: JSONConfig{Mapping: map[string]string{
				"one.two.1": "mapped_value",
			}},
			wantErr: false,
			wantAssigned: map[string]string{
				"one.two.0":    "2",
				"mapped_value": "22",
				"five":         "5",
			},
		},
		"error on malformed JSON": {
			input:   `{ "host"": unquoted_string}`,
			wantErr: true,
		},
		"error on empty input": {
			wantErr: true,
		},
	}

	for name, test := range tests {
		t.Run(name, func(t *testing.T) {
			line := newLogLine()
			in := strings.NewReader(test.input)
			p, err := NewJSONParser(test.config, in)
			require.NoError(t, err)
			require.NotNil(t, p)

			err = p.ReadLine(line)

			if test.wantErr {
				assert.Error(t, err)
			} else {
				require.NoError(t, err)
				assert.Equal(t, test.wantAssigned, line.assigned)
			}
		})
	}
}
+
// TestJSONParser_Parse verifies Parse on raw rows: scalar types, key
// mapping, and error cases (malformed and empty input).
func TestJSONParser_Parse(t *testing.T) {
	tests := map[string]struct {
		config       JSONConfig
		input        string
		wantAssigned map[string]string
		wantErr      bool
	}{
		"string value": {
			input:   `{ "string": "example.com" }`,
			wantErr: false,
			wantAssigned: map[string]string{
				"string": "example.com",
			},
		},
		"int value": {
			input:   `{ "int": 1 }`,
			wantErr: false,
			wantAssigned: map[string]string{
				"int": "1",
			},
		},
		"float value": {
			input:   `{ "float": 1.1 }`,
			wantErr: false,
			wantAssigned: map[string]string{
				"float": "1.1",
			},
		},
		"string, int, float values": {
			input:   `{ "string": "example.com", "int": 1, "float": 1.1 }`,
			wantErr: false,
			wantAssigned: map[string]string{
				"string": "example.com",
				"int":    "1",
				"float":  "1.1",
			},
		},
		"string, int, float values with mappings": {
			config: JSONConfig{Mapping: map[string]string{
				"string": "STRING",
				"int":    "INT",
				"float":  "FLOAT",
			}},
			input:   `{ "string": "example.com", "int": 1, "float": 1.1 }`,
			wantErr: false,
			wantAssigned: map[string]string{
				"STRING": "example.com",
				"INT":    "1",
				"FLOAT":  "1.1",
			},
		},
		"error on malformed JSON": {
			input:   `{ "host"": unquoted_string}`,
			wantErr: true,
		},
		"error on empty input": {
			wantErr: true,
		},
	}

	for name, test := range tests {
		t.Run(name, func(t *testing.T) {
			line := newLogLine()
			p, err := NewJSONParser(test.config, nil)
			require.NoError(t, err)
			require.NotNil(t, p)

			err = p.Parse([]byte(test.input), line)

			if test.wantErr {
				assert.Error(t, err)
			} else {
				require.NoError(t, err)
				assert.Equal(t, test.wantAssigned, line.assigned)
			}
		})
	}
}
diff --git a/src/go/collectors/go.d.plugin/pkg/logs/lastline.go b/src/go/collectors/go.d.plugin/pkg/logs/lastline.go
new file mode 100644
index 000000000..911dbf497
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/logs/lastline.go
@@ -0,0 +1,65 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package logs
+
+import (
+ "errors"
+ "os"
+
+ "github.com/clbanning/rfile/v2"
+)
+
// DefaultMaxLineWidth is used by ReadLastLine when maxLineWidth <= 0.
const DefaultMaxLineWidth = 4 * 1024 // assume disk block size is 4K

// ErrTooLongLine is returned by ReadLastLine when the last line does not
// fit within maxLineWidth bytes.
var ErrTooLongLine = errors.New("too long line")
+
+// ReadLastLine returns the last line of the file and any read error encountered.
+// It expects last line width <= maxLineWidth.
+// If maxLineWidth <= 0, it defaults to DefaultMaxLineWidth.
+func ReadLastLine(filename string, maxLineWidth int64) ([]byte, error) {
+ if maxLineWidth <= 0 {
+ maxLineWidth = DefaultMaxLineWidth
+ }
+ f, err := os.Open(filename)
+ if err != nil {
+ return nil, err
+ }
+ defer func() { _ = f.Close() }()
+
+ stat, _ := f.Stat()
+ endPos := stat.Size()
+ if endPos == 0 {
+ return []byte{}, nil
+ }
+ startPos := endPos - maxLineWidth
+ if startPos < 0 {
+ startPos = 0
+ }
+ buf := make([]byte, endPos-startPos)
+ n, err := f.ReadAt(buf, startPos)
+ if err != nil {
+ return nil, err
+ }
+ lnPos := 0
+ foundLn := false
+ for i := n - 2; i >= 0; i-- {
+ ch := buf[i]
+ if ch == '\n' {
+ foundLn = true
+ lnPos = i
+ break
+ }
+ }
+ if foundLn {
+ return buf[lnPos+1 : n], nil
+ }
+ if startPos == 0 {
+ return buf[0:n], nil
+ }
+
+ return nil, ErrTooLongLine
+}
+
// ReadLastLines returns up to n trailing lines of the file
// (delegates to rfile.Tail).
func ReadLastLines(filename string, n uint) ([]string, error) {
	return rfile.Tail(filename, int(n))
}
diff --git a/src/go/collectors/go.d.plugin/pkg/logs/lastline_test.go b/src/go/collectors/go.d.plugin/pkg/logs/lastline_test.go
new file mode 100644
index 000000000..ea0a75e9e
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/logs/lastline_test.go
@@ -0,0 +1,54 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package logs
+
+import (
+ "os"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestReadLastLine(t *testing.T) {
+ tests := []struct {
+ name string
+ content string
+ expected string
+ err error
+ }{
+ {"empty", "", "", nil},
+ {"empty-ln", "\n", "\n", nil},
+ {"one-line", "hello", "hello", nil},
+ {"one-line-ln", "hello\n", "hello\n", nil},
+ {"multi-line", "hello\nworld", "world", nil},
+ {"multi-line-ln", "hello\nworld\n", "world\n", nil},
+ {"long-line", "hello hello hello", "", ErrTooLongLine},
+ {"long-line-ln", "hello hello hello\n", "", ErrTooLongLine},
+ }
+ for _, test := range tests {
+ t.Run(test.name, func(t *testing.T) {
+ filename := prepareFile(t, test.content)
+ defer func() { _ = os.Remove(filename) }()
+
+ line, err := ReadLastLine(filename, 10)
+
+ if test.err != nil {
+ require.NotNil(t, err)
+ assert.Contains(t, err.Error(), test.err.Error())
+ } else {
+ assert.Equal(t, test.expected, string(line))
+ }
+ })
+ }
+}
+
+func prepareFile(t *testing.T, content string) string {
+ t.Helper()
+ file, err := os.CreateTemp("", "go-test")
+ require.NoError(t, err)
+ defer func() { _ = file.Close() }()
+
+ _, _ = file.WriteString(content)
+ return file.Name()
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/logs/ltsv.go b/src/go/collectors/go.d.plugin/pkg/logs/ltsv.go
new file mode 100644
index 000000000..b7fbceb14
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/logs/ltsv.go
@@ -0,0 +1,95 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package logs
+
+import (
+ "bufio"
+ "errors"
+ "fmt"
+ "io"
+ "strconv"
+ "unsafe"
+
+ "github.com/Wing924/ltsv"
+)
+
type (
	// LTSVConfig is the configuration of an LTSVParser.
	LTSVConfig struct {
		// FieldDelimiter separates label:value pairs; a single character
		// or its decimal byte code (see parseLTSVDelimiter).
		FieldDelimiter string `yaml:"field_delimiter" json:"field_delimiter"`
		// ValueDelimiter separates a label from its value.
		ValueDelimiter string `yaml:"value_delimiter" json:"value_delimiter"`
		// Mapping renames labels before assignment.
		Mapping map[string]string `yaml:"mapping" json:"mapping"`
	}

	// LTSVParser parses LTSV-encoded log lines.
	LTSVParser struct {
		r       *bufio.Reader
		parser  ltsv.Parser
		mapping map[string]string // label renames from LTSVConfig.Mapping
	}
)
+
+func NewLTSVParser(config LTSVConfig, in io.Reader) (*LTSVParser, error) {
+ p := ltsv.Parser{
+ FieldDelimiter: ltsv.DefaultParser.FieldDelimiter,
+ ValueDelimiter: ltsv.DefaultParser.ValueDelimiter,
+ StrictMode: false,
+ }
+ if config.FieldDelimiter != "" {
+ if d, err := parseLTSVDelimiter(config.FieldDelimiter); err == nil {
+ p.FieldDelimiter = d
+ }
+ }
+ if config.ValueDelimiter != "" {
+ if d, err := parseLTSVDelimiter(config.ValueDelimiter); err == nil {
+ p.ValueDelimiter = d
+ }
+ }
+ parser := &LTSVParser{
+ r: bufio.NewReader(in),
+ parser: p,
+ mapping: config.Mapping,
+ }
+ return parser, nil
+}
+
+func (p *LTSVParser) ReadLine(line LogLine) error {
+ row, err := p.r.ReadSlice('\n')
+ if err != nil && len(row) == 0 {
+ return err
+ }
+ if len(row) > 0 && row[len(row)-1] == '\n' {
+ row = row[:len(row)-1]
+ }
+ return p.Parse(row, line)
+}
+
// Parse parses a single LTSV row, renaming labels via the configured mapping
// and assigning each label/value pair to line. Any parser or assign error is
// wrapped in *ParseError.
func (p *LTSVParser) Parse(row []byte, line LogLine) error {
	err := p.parser.ParseLine(row, func(label []byte, value []byte) error {
		s := *(*string)(unsafe.Pointer(&label)) // no alloc, same as in fmt.Builder.String()
		if v, ok := p.mapping[s]; ok {
			s = v
		}
		return line.Assign(s, string(value))
	})
	if err != nil {
		return &ParseError{msg: fmt.Sprintf("ltsv parse: %v", err), err: err}
	}
	return nil
}
+
+func (p LTSVParser) Info() string {
+ return fmt.Sprintf("ltsv: %q", p.mapping)
+}
+
+func parseLTSVDelimiter(s string) (byte, error) {
+ if isNumber(s) {
+ d, err := strconv.ParseUint(s, 10, 8)
+ if err != nil {
+ return 0, fmt.Errorf("invalid LTSV delimiter: %v", err)
+ }
+ return byte(d), nil
+ }
+ if len(s) != 1 {
+ return 0, errors.New("invalid LTSV delimiter: must be a single character")
+ }
+ return s[0], nil
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/logs/ltsv_test.go b/src/go/collectors/go.d.plugin/pkg/logs/ltsv_test.go
new file mode 100644
index 000000000..f6d5ec2bd
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/logs/ltsv_test.go
@@ -0,0 +1,125 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package logs
+
+import (
+ "strings"
+ "testing"
+
+ "github.com/Wing924/ltsv"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
// testLTSVConfig is the parser config shared by the LTSV tests below.
var testLTSVConfig = LTSVConfig{
	FieldDelimiter: " ",
	ValueDelimiter: "=",
	Mapping:        map[string]string{"KEY": "key"},
}
+
// TestNewLTSVParser verifies that the constructor applies configured
// delimiters and mapping, and falls back to ltsv defaults when unset.
func TestNewLTSVParser(t *testing.T) {
	tests := []struct {
		name    string
		config  LTSVConfig
		wantErr bool
	}{
		{name: "config", config: testLTSVConfig},
		{name: "empty config"},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			p, err := NewLTSVParser(tt.config, nil)

			if tt.wantErr {
				assert.Error(t, err)
				assert.Nil(t, p)
			} else {
				assert.NoError(t, err)
				assert.NotNil(t, p)
				if tt.config.FieldDelimiter == "" {
					assert.Equal(t, ltsv.DefaultParser.FieldDelimiter, p.parser.FieldDelimiter)
				} else {
					assert.Equal(t, tt.config.FieldDelimiter, string(p.parser.FieldDelimiter))
				}
				if tt.config.ValueDelimiter == "" {
					assert.Equal(t, ltsv.DefaultParser.ValueDelimiter, p.parser.ValueDelimiter)
				} else {
					assert.Equal(t, tt.config.ValueDelimiter, string(p.parser.ValueDelimiter))
				}
				assert.Equal(t, tt.config.Mapping, p.mapping)
			}
		})
	}
}
+
+func TestLTSVParser_ReadLine(t *testing.T) {
+ tests := []struct {
+ name string
+ row string
+ wantErr bool
+ wantParseErr bool
+ }{
+ {name: "no error", row: "A=1 B=2 KEY=3"},
+ {name: "error on parsing", row: "NO LABEL", wantErr: true, wantParseErr: true},
+ {name: "error on assigning", row: "A=1 ERR=2", wantErr: true, wantParseErr: true},
+ {name: "error on reading EOF", row: "", wantErr: true},
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ var line logLine
+ r := strings.NewReader(tt.row)
+ p, err := NewLTSVParser(testLTSVConfig, r)
+ require.NoError(t, err)
+
+ err = p.ReadLine(&line)
+
+ if tt.wantErr {
+ require.Error(t, err)
+ if tt.wantParseErr {
+ assert.True(t, IsParseError(err))
+ } else {
+ assert.False(t, IsParseError(err))
+ }
+ } else {
+ assert.NoError(t, err)
+ }
+ })
+ }
+}
+
+func TestLTSVParser_Parse(t *testing.T) {
+ tests := []struct {
+ name string
+ row string
+ wantErr bool
+ }{
+ {name: "no error", row: "A=1 B=2"},
+ {name: "error on parsing", row: "NO LABEL", wantErr: true},
+ {name: "error on assigning", row: "A=1 ERR=2", wantErr: true},
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ var line logLine
+ p, err := NewLTSVParser(testLTSVConfig, nil)
+ require.NoError(t, err)
+
+ err = p.Parse([]byte(tt.row), &line)
+
+ if tt.wantErr {
+ require.Error(t, err)
+ assert.True(t, IsParseError(err))
+ } else {
+ assert.NoError(t, err)
+ }
+ })
+ }
+}
+
+func TestLTSVParser_Info(t *testing.T) {
+ p, err := NewLTSVParser(testLTSVConfig, nil)
+ require.NoError(t, err)
+ assert.NotZero(t, p.Info())
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/logs/parser.go b/src/go/collectors/go.d.plugin/pkg/logs/parser.go
new file mode 100644
index 000000000..d83b4309d
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/logs/parser.go
@@ -0,0 +1,65 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package logs
+
+import (
+ "errors"
+ "fmt"
+ "io"
+ "strconv"
+)
+
// ParseError marks a line-parsing failure so callers can distinguish
// malformed input from I/O errors (see IsParseError).
type ParseError struct {
	msg string // human-readable description
	err error  // underlying cause, if any
}

// Error implements the error interface.
func (e ParseError) Error() string { return e.msg }

// Unwrap returns the underlying cause for use with errors.Is/As.
func (e ParseError) Unwrap() error { return e.err }

// IsParseError reports whether err is (or wraps) a *ParseError.
func IsParseError(err error) bool { var v *ParseError; return errors.As(err, &v) }
+
type (
	// LogLine is the destination of parsed fields: Assign is called once
	// per extracted name/value pair and may reject a pair with an error.
	LogLine interface {
		Assign(name string, value string) error
	}

	// Parser reads and/or parses log rows into a LogLine.
	Parser interface {
		// ReadLine reads the next row from the parser's reader and parses it.
		ReadLine(LogLine) error
		// Parse parses a single raw row.
		Parse(row []byte, line LogLine) error
		// Info returns a human-readable description of the parser.
		Info() string
	}
)
+
// Supported log parser types (values of ParserConfig.LogType).
const (
	TypeCSV    = "csv"
	TypeLTSV   = "ltsv"
	TypeRegExp = "regexp"
	TypeJSON   = "json"
)
+
// ParserConfig selects a parser type and carries the per-type configuration;
// only the config matching LogType is used by NewParser.
type ParserConfig struct {
	LogType string       `yaml:"log_type" json:"log_type"`
	CSV     CSVConfig    `yaml:"csv_config" json:"csv_config"`
	LTSV    LTSVConfig   `yaml:"ltsv_config" json:"ltsv_config"`
	RegExp  RegExpConfig `yaml:"regexp_config" json:"regexp_config"`
	JSON    JSONConfig   `yaml:"json_config" json:"json_config"`
}
+
+func NewParser(config ParserConfig, in io.Reader) (Parser, error) {
+ switch config.LogType {
+ case TypeCSV:
+ return NewCSVParser(config.CSV, in)
+ case TypeLTSV:
+ return NewLTSVParser(config.LTSV, in)
+ case TypeRegExp:
+ return NewRegExpParser(config.RegExp, in)
+ case TypeJSON:
+ return NewJSONParser(config.JSON, in)
+ default:
+ return nil, fmt.Errorf("invalid type: %q", config.LogType)
+ }
+}
+
+func isNumber(s string) bool { _, err := strconv.Atoi(s); return err == nil }
diff --git a/src/go/collectors/go.d.plugin/pkg/logs/parser_test.go b/src/go/collectors/go.d.plugin/pkg/logs/parser_test.go
new file mode 100644
index 000000000..88ef46c27
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/logs/parser_test.go
@@ -0,0 +1,3 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package logs
diff --git a/src/go/collectors/go.d.plugin/pkg/logs/reader.go b/src/go/collectors/go.d.plugin/pkg/logs/reader.go
new file mode 100644
index 000000000..ee526a9e3
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/logs/reader.go
@@ -0,0 +1,193 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package logs
+
+import (
+ "errors"
+ "fmt"
+ "io"
+ "os"
+ "path/filepath"
+ "sort"
+
+ "github.com/netdata/netdata/go/go.d.plugin/logger"
+)
+
// maxEOF is the total number of EOF reads after which the Reader considers
// reopening the file (log rotation detection heuristic).
const maxEOF = 60

// ErrNoMatchedFile is returned when no file matches the configured path pattern.
var ErrNoMatchedFile = errors.New("no matched files")
+
// Reader is a log rotate aware Reader
// TODO: better reopen algorithm
// TODO: handle truncate
type Reader struct {
	file          *os.File
	path          string // shell pattern the tailed file is resolved from
	excludePath   string // shell pattern of files to skip when resolving
	eofCounter    int    // total EOFs seen; reset on Close
	continuousEOF int    // consecutive EOFs with no successful read between
	log           *logger.Logger
}
+
+// Open a file and seek to end of the file.
+// path: the shell file name pattern
+// excludePath: the shell file name pattern
+func Open(path string, excludePath string, log *logger.Logger) (*Reader, error) {
+ var err error
+ if path, err = filepath.Abs(path); err != nil {
+ return nil, err
+ }
+ if _, err = filepath.Match(path, "/"); err != nil {
+ return nil, fmt.Errorf("bad path syntax: %q", path)
+ }
+ if _, err = filepath.Match(excludePath, "/"); err != nil {
+ return nil, fmt.Errorf("bad exclude_path syntax: %q", path)
+ }
+ r := &Reader{
+ path: path,
+ excludePath: excludePath,
+ log: log,
+ }
+
+ if err = r.open(); err != nil {
+ return nil, err
+ }
+ return r, nil
+}
+
// CurrentFilename returns the name of the currently opened file.
func (r *Reader) CurrentFilename() string {
	return r.file.Name()
}
+
+func (r *Reader) open() error {
+ path := r.findFile()
+ if path == "" {
+ r.log.Debugf("couldn't find log file, used path: '%s', exclude_path: '%s'", r.path, r.excludePath)
+ return ErrNoMatchedFile
+ }
+ r.log.Debug("open log file: ", path)
+ file, err := os.Open(path)
+ if err != nil {
+ return err
+ }
+ stat, err := file.Stat()
+ if err != nil {
+ return err
+ }
+ if _, err = file.Seek(stat.Size(), io.SeekStart); err != nil {
+ return err
+ }
+ r.file = file
+ return nil
+}
+
// Read implements io.Reader. On io.EOF it updates the rotation-detection
// counters and may transparently reopen the file; a read on a closed
// Reader (nil file yields os.ErrInvalid) also triggers a reopen attempt.
func (r *Reader) Read(p []byte) (n int, err error) {
	n, err = r.file.Read(p)
	if err != nil {
		switch err {
		case io.EOF:
			err = r.handleEOFErr()
		case os.ErrInvalid: // r.file is nil after Close
			err = r.handleInvalidArgErr()
		}
		return
	}
	// successful read: break the consecutive-EOF streak
	r.continuousEOF = 0
	return
}
+
// handleEOFErr counts EOFs and reopens the file once at least maxEOF total
// EOFs have accumulated AND the last two reads both hit EOF (the file looks
// idle) — the heuristic for a rotated log. A reopen failure replaces io.EOF
// as the returned error.
func (r *Reader) handleEOFErr() (err error) {
	err = io.EOF
	r.eofCounter++
	r.continuousEOF++
	if r.eofCounter < maxEOF || r.continuousEOF < 2 {
		return err
	}
	if err2 := r.reopen(); err2 != nil {
		err = err2
	}
	return err
}
+
+func (r *Reader) handleInvalidArgErr() (err error) {
+ err = io.EOF
+ if err2 := r.reopen(); err2 != nil {
+ err = err2
+ }
+ return err
+}
+
+func (r *Reader) Close() (err error) {
+ if r == nil || r.file == nil {
+ return
+ }
+ r.log.Debug("close log file: ", r.file.Name())
+ err = r.file.Close()
+ r.file = nil
+ r.eofCounter = 0
+ return
+}
+
// reopen closes the current file (if any) and opens the freshest match of
// the path pattern again.
func (r *Reader) reopen() error {
	r.log.Debugf("reopen, look for: %s", r.path)
	_ = r.Close()
	return r.open()
}
+
// findFile resolves the configured patterns to the file that should be tailed.
func (r *Reader) findFile() string {
	return find(r.path, r.excludePath)
}
+
+func find(path, exclude string) string {
+ return finder{}.find(path, exclude)
+}
+
+// TODO: tests
+type finder struct{}
+
+func (f finder) find(path, exclude string) string {
+ files, _ := filepath.Glob(path)
+ if len(files) == 0 {
+ return ""
+ }
+
+ files = f.filter(files, exclude)
+ if len(files) == 0 {
+ return ""
+ }
+
+ return f.findLastFile(files)
+}
+
+func (f finder) filter(files []string, exclude string) []string {
+ if exclude == "" {
+ return files
+ }
+
+ fs := make([]string, 0, len(files))
+ for _, file := range files {
+ if ok, _ := filepath.Match(exclude, file); ok {
+ continue
+ }
+ fs = append(fs, file)
+ }
+ return fs
+}
+
+// TODO: the logic is probably wrong
+func (f finder) findLastFile(files []string) string {
+ sort.Strings(files)
+ for i := len(files) - 1; i >= 0; i-- {
+ stat, err := os.Stat(files[i])
+ if err != nil || !stat.Mode().IsRegular() {
+ continue
+ }
+ return files[i]
+ }
+ return ""
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/logs/reader_test.go b/src/go/collectors/go.d.plugin/pkg/logs/reader_test.go
new file mode 100644
index 000000000..e6ef47fe7
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/logs/reader_test.go
@@ -0,0 +1,245 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package logs
+
+import (
+ "bufio"
+ "fmt"
+ "io"
+ "os"
+ "path/filepath"
+ "testing"
+ "time"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
// TestReader_Read verifies that lines appended to the tracked file are
// delivered across successive read-until-EOF cycles.
func TestReader_Read(t *testing.T) {
	reader, teardown := prepareTestReader(t)
	defer teardown()

	r := testReader{bufio.NewReader(reader)}
	filename := reader.CurrentFilename()
	numLogs := 5
	var sum int

	for i := 0; i < 10; i++ {
		appendLogs(t, filename, time.Millisecond*10, numLogs)
		n, err := r.readUntilEOF()
		sum += n

		// every cycle ends on EOF with all appended lines consumed
		assert.Equal(t, io.EOF, err)
		assert.Equal(t, numLogs*(i+1), sum)
	}
}
+
// TestReader_Read_HandleFileRotation checks that after the tracked file is
// rotated (removed and recreated), enough consecutive EOFs trigger a reopen
// and reading resumes from the new file.
func TestReader_Read_HandleFileRotation(t *testing.T) {
	reader, teardown := prepareTestReader(t)
	defer teardown()

	r := testReader{bufio.NewReader(reader)}
	filename := reader.CurrentFilename()
	numLogs := 5
	rotateFile(t, filename)
	appendLogs(t, filename, time.Millisecond*10, numLogs)

	// the reader still holds the removed inode: maxEOF reads see nothing
	n, err := r.readUntilEOFTimes(maxEOF)
	assert.Equal(t, io.EOF, err)
	assert.Equal(t, 0, n)

	// the next EOF triggers the reopen, making new lines visible
	appendLogs(t, filename, time.Millisecond*10, numLogs)
	n, err = r.readUntilEOF()
	assert.Equal(t, io.EOF, err)
	assert.Equal(t, numLogs, n)
}
+
// TestReader_Read_HandleFileRotationWithDelay checks the case where the file
// disappears and is recreated only after the reader has already tried (and
// failed) to reopen it.
func TestReader_Read_HandleFileRotationWithDelay(t *testing.T) {
	reader, teardown := prepareTestReader(t)
	defer teardown()

	r := testReader{bufio.NewReader(reader)}
	filename := reader.CurrentFilename()
	_ = os.Remove(filename)

	// trigger reopen first time
	n, err := r.readUntilEOFTimes(maxEOF)
	assert.Equal(t, ErrNoMatchedFile, err)
	assert.Equal(t, 0, n)

	f, err := os.Create(filename)
	require.NoError(t, err)
	_ = f.Close()

	// trigger reopen 2nd time
	n, err = r.readUntilEOF()
	assert.Equal(t, io.EOF, err)
	assert.Equal(t, 0, n)

	numLogs := 5
	appendLogs(t, filename, time.Millisecond*10, numLogs)
	n, err = r.readUntilEOF()
	assert.Equal(t, io.EOF, err)
	assert.Equal(t, numLogs, n)
}
+
// TestReader_Close verifies Close releases the handle and nils out the file.
func TestReader_Close(t *testing.T) {
	reader, teardown := prepareTestReader(t)
	defer teardown()

	assert.NoError(t, reader.Close())
	assert.Nil(t, reader.file)
}
+
// TestReader_Close_NilFile verifies Close is a no-op on a zero-value Reader.
func TestReader_Close_NilFile(t *testing.T) {
	var r Reader
	assert.NoError(t, r.Close())
}
+
+func TestOpen(t *testing.T) {
+ tempFileName1 := prepareTempFile(t, "*-web_log-open-test-1.log")
+ tempFileName2 := prepareTempFile(t, "*-web_log-open-test-2.log")
+ tempFileName3 := prepareTempFile(t, "*-web_log-open-test-3.log")
+ defer func() {
+ _ = os.Remove(tempFileName1)
+ _ = os.Remove(tempFileName2)
+ _ = os.Remove(tempFileName3)
+ }()
+
+ makePath := func(s string) string {
+ return filepath.Join(os.TempDir(), s)
+ }
+
+ tests := []struct {
+ name string
+ path string
+ exclude string
+ err bool
+ }{
+ {
+ name: "match without exclude",
+ path: makePath("*-web_log-open-test-[1-3].log"),
+ },
+ {
+ name: "match with exclude",
+ path: makePath("*-web_log-open-test-[1-3].log"),
+ exclude: makePath("*-web_log-open-test-[2-3].log"),
+ },
+ {
+ name: "exclude everything",
+ path: makePath("*-web_log-open-test-[1-3].log"),
+ exclude: makePath("*"),
+ err: true,
+ },
+ {
+ name: "no match",
+ path: makePath("*-web_log-no-match-test-[1-3].log"),
+ err: true,
+ },
+ {
+ name: "bad path pattern",
+ path: "[qw",
+ err: true,
+ },
+ {
+ name: "bad exclude path pattern",
+ path: "[qw",
+ err: true,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ r, err := Open(tt.path, tt.exclude, nil)
+
+ if tt.err {
+ assert.Error(t, err)
+ } else {
+ assert.NoError(t, err)
+ assert.NotNil(t, r.file)
+ _ = r.Close()
+ }
+ })
+ }
+}
+
// TestReader_CurrentFilename verifies the accessor mirrors the open file name.
func TestReader_CurrentFilename(t *testing.T) {
	reader, teardown := prepareTestReader(t)
	defer teardown()

	assert.Equal(t, reader.file.Name(), reader.CurrentFilename())
}
+
+type testReader struct {
+ *bufio.Reader
+}
+
+func (r *testReader) readUntilEOF() (n int, err error) {
+ for {
+ _, err = r.ReadBytes('\n')
+ if err != nil {
+ break
+ }
+ n++
+ }
+ return n, err
+}
+
+func (r *testReader) readUntilEOFTimes(times int) (sum int, err error) {
+ var n int
+ for i := 0; i < times; i++ {
+ n, err = r.readUntilEOF()
+ if err != io.EOF {
+ break
+ }
+ sum += n
+ }
+ return sum, err
+}
+
+func prepareTempFile(t *testing.T, pattern string) string {
+ t.Helper()
+ f, err := os.CreateTemp("", pattern)
+ require.NoError(t, err)
+ return f.Name()
+}
+
// prepareTestReader builds a Reader over a fresh temp log file, positioned at
// the start of the file (it opens directly, bypassing Open's seek-to-end),
// plus a teardown that removes the file and closes the handle.
func prepareTestReader(t *testing.T) (reader *Reader, teardown func()) {
	t.Helper()
	filename := prepareTempFile(t, "*-web_log-test.log")
	f, err := os.Open(filename)
	require.NoError(t, err)

	teardown = func() {
		_ = os.Remove(filename)
		_ = reader.file.Close()
	}
	reader = &Reader{
		file: f,
		path: filename,
	}
	return reader, teardown
}
+
// rotateFile emulates log rotation: the file is removed and recreated empty
// under the same name.
func rotateFile(t *testing.T, filename string) {
	t.Helper()
	require.NoError(t, os.Remove(filename))
	f, err := os.Create(filename)
	require.NoError(t, err)
	_ = f.Close()
}
+
+func appendLogs(t *testing.T, filename string, interval time.Duration, numOfLogs int) {
+ t.Helper()
+ base := filepath.Base(filename)
+ file, err := os.OpenFile(filename, os.O_RDWR|os.O_APPEND, os.ModeAppend)
+ require.NoError(t, err)
+ require.NotNil(t, file)
+ defer func() { _ = file.Close() }()
+
+ for i := 0; i < numOfLogs; i++ {
+ _, err = fmt.Fprintln(file, "line", i, "filename", base)
+ require.NoError(t, err)
+ time.Sleep(interval)
+ }
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/logs/regexp.go b/src/go/collectors/go.d.plugin/pkg/logs/regexp.go
new file mode 100644
index 000000000..e0dee1d02
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/logs/regexp.go
@@ -0,0 +1,76 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package logs
+
+import (
+ "bufio"
+ "errors"
+ "fmt"
+ "io"
+ "regexp"
+)
+
+type (
+ RegExpConfig struct {
+ Pattern string `yaml:"pattern" json:"pattern"`
+ }
+
+ RegExpParser struct {
+ r *bufio.Reader
+ pattern *regexp.Regexp
+ }
+)
+
+func NewRegExpParser(config RegExpConfig, in io.Reader) (*RegExpParser, error) {
+ if config.Pattern == "" {
+ return nil, errors.New("empty pattern")
+ }
+
+ pattern, err := regexp.Compile(config.Pattern)
+ if err != nil {
+ return nil, fmt.Errorf("compile: %w", err)
+ }
+
+ if pattern.NumSubexp() == 0 {
+ return nil, errors.New("pattern has no named subgroups")
+ }
+
+ p := &RegExpParser{
+ r: bufio.NewReader(in),
+ pattern: pattern,
+ }
+ return p, nil
+}
+
+func (p *RegExpParser) ReadLine(line LogLine) error {
+ row, err := p.r.ReadSlice('\n')
+ if err != nil && len(row) == 0 {
+ return err
+ }
+ if len(row) > 0 && row[len(row)-1] == '\n' {
+ row = row[:len(row)-1]
+ }
+ return p.Parse(row, line)
+}
+
+func (p *RegExpParser) Parse(row []byte, line LogLine) error {
+ match := p.pattern.FindSubmatch(row)
+ if len(match) == 0 {
+ return &ParseError{msg: "regexp parse: unmatched line"}
+ }
+
+ for i, name := range p.pattern.SubexpNames() {
+ if name == "" || match[i] == nil {
+ continue
+ }
+ err := line.Assign(name, string(match[i]))
+ if err != nil {
+ return &ParseError{msg: fmt.Sprintf("regexp parse: %v", err), err: err}
+ }
+ }
+ return nil
+}
+
+func (p RegExpParser) Info() string {
+ return fmt.Sprintf("regexp: %s", p.pattern)
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/logs/regexp_test.go b/src/go/collectors/go.d.plugin/pkg/logs/regexp_test.go
new file mode 100644
index 000000000..fc7bacaa5
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/logs/regexp_test.go
@@ -0,0 +1,131 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package logs
+
+import (
+ "errors"
+ "strings"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
// TestNewRegExpParser covers constructor validation of the pattern.
func TestNewRegExpParser(t *testing.T) {
	tests := []struct {
		name    string
		pattern string
		wantErr bool
	}{
		{name: "valid pattern", pattern: `(?P<A>\d+) (?P<B>\d+)`},
		{name: "no names subgroups in pattern", pattern: `(?:\d+) (?:\d+)`, wantErr: true},
		{name: "invalid pattern", pattern: `(((?P<A>\d+) (?P<B>\d+)`, wantErr: true},
		{name: "empty pattern", wantErr: true},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			p, err := NewRegExpParser(RegExpConfig{Pattern: tt.pattern}, nil)
			if tt.wantErr {
				assert.Error(t, err)
				assert.Nil(t, p)
			} else {
				assert.NoError(t, err)
				assert.NotNil(t, p)
			}
		})
	}
}
+
// TestRegExpParser_ReadLine covers reading+parsing, distinguishing parse
// failures (*ParseError) from plain read errors (EOF on empty input).
func TestRegExpParser_ReadLine(t *testing.T) {
	tests := []struct {
		name         string
		row          string
		pattern      string
		wantErr      bool
		wantParseErr bool
	}{
		{name: "match and no error", row: "1 2", pattern: `(?P<A>\d+) (?P<B>\d+)`},
		{name: "match but error on assigning", row: "1 2", pattern: `(?P<A>\d+) (?P<ERR>\d+)`, wantErr: true, wantParseErr: true},
		{name: "not match", row: "A B", pattern: `(?P<A>\d+) (?P<B>\d+)`, wantErr: true, wantParseErr: true},
		{name: "not match multiline", row: "a b\n3 4", pattern: `(?P<A>\d+) (?P<B>\d+)`, wantErr: true, wantParseErr: true},
		{name: "error on reading EOF", row: "", pattern: `(?P<A>\d+) (?P<B>\d+)`, wantErr: true},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			var line logLine
			r := strings.NewReader(tt.row)
			p, err := NewRegExpParser(RegExpConfig{Pattern: tt.pattern}, r)
			require.NoError(t, err)

			err = p.ReadLine(&line)
			if tt.wantErr {
				require.Error(t, err)
				if tt.wantParseErr {
					assert.True(t, IsParseError(err))
				} else {
					assert.False(t, IsParseError(err))
				}
			} else {
				assert.NoError(t, err)
			}
		})
	}
}
+
// TestRegExpParser_Parse covers direct row parsing without the reader.
func TestRegExpParser_Parse(t *testing.T) {
	tests := []struct {
		name    string
		row     string
		pattern string
		wantErr bool
	}{
		{name: "match and no error", row: "1 2", pattern: `(?P<A>\d+) (?P<B>\d+)`},
		{name: "match but error on assigning", row: "1 2", pattern: `(?P<A>\d+) (?P<ERR>\d+)`, wantErr: true},
		{name: "not match", row: "A B", pattern: `(?P<A>\d+) (?P<B>\d+)`, wantErr: true},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			var line logLine
			p, err := NewRegExpParser(RegExpConfig{Pattern: tt.pattern}, nil)
			require.NoError(t, err)

			err = p.Parse([]byte(tt.row), &line)
			if tt.wantErr {
				require.Error(t, err)
				assert.True(t, IsParseError(err))
			} else {
				assert.NoError(t, err)
			}
		})
	}
}
+
// TestRegExpParser_Info only checks Info returns something non-empty.
func TestRegExpParser_Info(t *testing.T) {
	p, err := NewRegExpParser(RegExpConfig{Pattern: `(?P<A>\d+) (?P<B>\d+)`}, nil)
	require.NoError(t, err)
	assert.NotZero(t, p.Info())
}
+
// logLine is a LogLine stub for tests: it records assignments and fails on
// the special ERR field names to exercise parser error paths.
type logLine struct {
	assigned map[string]string
}

func newLogLine() *logLine {
	return &logLine{assigned: map[string]string{}}
}

// Assign stores name=val; "ERR" and "$ERR" deliberately return an error.
func (l *logLine) Assign(name, val string) error {
	if name == "$ERR" || name == "ERR" {
		return errors.New("assign error")
	}
	if l.assigned != nil {
		l.assigned[name] = val
	}
	return nil
}
diff --git a/src/go/collectors/go.d.plugin/pkg/matcher/README.md b/src/go/collectors/go.d.plugin/pkg/matcher/README.md
new file mode 100644
index 000000000..e9442b5e5
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/matcher/README.md
@@ -0,0 +1,142 @@
+<!--
+title: "matcher"
+custom_edit_url: "https://github.com/netdata/netdata/blob/master/src/go/collectors/go.d.plugin/pkg/matcher/README.md"
+sidebar_label: "matcher"
+learn_status: "Published"
+learn_rel_path: "Developers/External plugins/go.d.plugin/Helper Packages"
+-->
+
+# matcher
+## Supported Format
+
+* string
+* glob
+* regexp
+* simple patterns
+
+Depending on the symbol at the start of the string, the `matcher` will use one of the supported formats.
+
+| matcher | short format | long format |
+|-----------------|--------------|-------------------|
+| string | ` =` | `string` |
+| glob | `*` | `glob` |
+| regexp | `~` | `regexp` |
+| simple patterns | | `simple_patterns` |
+
+Example:
+
+- `* pattern`: It will use the `glob` matcher to find the `pattern` in the string.
+
+### Syntax
+
+**Tip**: Read `::=` as `is defined as`.
+
+```
+Short Syntax
+ [ <not> ] <format> <space> <expr>
+
+ <not> ::= '!'
+ negative expression
+ <format> ::= [ '=', '~', '*' ]
+ '=' means string match
+ '~' means regexp match
+ '*' means glob match
+ <space> ::= { ' ' | '\t' | '\n' | '\r' }
+ <expr> ::= any string
+
+ Long Syntax
+ [ <not> ] <format> <separator> <expr>
+
+ <format> ::= [ 'string' | 'glob' | 'regexp' | 'simple_patterns' ]
+ <not> ::= '!'
+ negative expression
+ <separator> ::= ':'
+ <expr> ::= any string
+```
+
+When using the short syntax, you can enable the glob format by starting the string with a `*`, while in the long syntax
+you need to define it more explicitly. The following examples are identical. `simple_patterns` can be used **only** with
+the long syntax.
+
+Examples:
+
+- Short Syntax: `'* * '`
+- Long Syntax: `'glob:*'`
+
+### String matcher
+
+The string matcher reports whether the given value equals the string.
+
+Examples:
+
+- `'= foo'` matches only if the string is `foo`.
+- `'!= bar'` matches any string that is not `bar`.
+
+String matcher means **exact match** of the `string`. There are other string match related cases:
+
+- string has prefix `something`
+- string has suffix `something`
+- string contains `something`
+
+This is achievable using the `glob` matcher:
+
+- `* PREFIX*`, means that it matches with any string that *starts* with `PREFIX`, e.g `PREFIXnetdata`
+- `* *SUFFIX`, means that it matches with any string that *ends* with `SUFFIX`, e.g `netdataSUFFIX`
+- `* *SUBSTRING*`, means that it matches with any string that *contains* `SUBSTRING`, e.g `netdataSUBSTRINGnetdata`
+
+### Glob matcher
+
+The glob matcher reports whether the given value matches the wildcard pattern. It uses the standard `golang`
+library `path`. You can read more about the library in the [golang documentation](https://golang.org/pkg/path/#Match),
+where you can also practice with the library in order to learn the syntax and use it in your Netdata configuration.
+
+The pattern syntax is:
+
+```
+ pattern:
+ { term }
+ term:
+ '*' matches any sequence of characters
+ '?' matches any single character
+ '[' [ '^' ] { character-range } ']'
+ character class (must be non-empty)
+ c matches character c (c != '*', '?', '\\', '[')
+ '\\' c matches character c
+
+ character-range:
+ c matches character c (c != '\\', '-', ']')
+ '\\' c matches character c
+ lo '-' hi matches character c for lo <= c <= hi
+```
+
+Examples:
+
+- `* ?` matches any string that is a single character.
+- `'?a'` matches any 2 character string that starts with any character and the second character is `a`, like `ba` but
+ not `bb` or `bba`.
+- `'[^abc]'` matches any character that is NOT a,b,c. `'[abc]'` matches only a, b, c.
+- `'*[a-d]'` matches any string (`*`) that ends with a character that is between `a` and `d` (i.e `a,b,c,d`).
+
+### Regexp matcher
+
+The regexp matcher reports whether the given value matches the RegExp pattern ( use regexp.Match ).
+
+The RegExp syntax is described at https://golang.org/pkg/regexp/syntax/.
+
+Learn more about regular expressions at [RegexOne](https://regexone.com/).
+
+### Simple patterns matcher
+
+The simple patterns matcher reports whether the given value matches the simple patterns.
+
+Simple patterns are a space separated list of words. Each word may use any number of wildcards `*`. Simple patterns
+allow negative matches by prefixing a word with `!`.
+
+Examples:
+
+- `!*bad* *` matches anything, except all those that contain the word bad.
+- `*foobar* !foo* !*bar *` matches everything containing foobar, except strings that start with foo or end with bar.
+
+
+
+
diff --git a/src/go/collectors/go.d.plugin/pkg/matcher/cache.go b/src/go/collectors/go.d.plugin/pkg/matcher/cache.go
new file mode 100644
index 000000000..4594fa06f
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/matcher/cache.go
@@ -0,0 +1,56 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package matcher
+
+import "sync"
+
+type (
+ cachedMatcher struct {
+ matcher Matcher
+
+ mux sync.RWMutex
+ cache map[string]bool
+ }
+)
+
+// WithCache adds cache to the matcher.
+func WithCache(m Matcher) Matcher {
+ switch m {
+ case TRUE(), FALSE():
+ return m
+ default:
+ return &cachedMatcher{matcher: m, cache: make(map[string]bool)}
+ }
+}
+
+func (m *cachedMatcher) Match(b []byte) bool {
+ s := string(b)
+ if result, ok := m.fetch(s); ok {
+ return result
+ }
+ result := m.matcher.Match(b)
+ m.put(s, result)
+ return result
+}
+
+func (m *cachedMatcher) MatchString(s string) bool {
+ if result, ok := m.fetch(s); ok {
+ return result
+ }
+ result := m.matcher.MatchString(s)
+ m.put(s, result)
+ return result
+}
+
+func (m *cachedMatcher) fetch(key string) (result bool, ok bool) {
+ m.mux.RLock()
+ result, ok = m.cache[key]
+ m.mux.RUnlock()
+ return
+}
+
+func (m *cachedMatcher) put(key string, result bool) {
+ m.mux.Lock()
+ m.cache[key] = result
+ m.mux.Unlock()
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/matcher/cache_test.go b/src/go/collectors/go.d.plugin/pkg/matcher/cache_test.go
new file mode 100644
index 000000000..a545777b3
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/matcher/cache_test.go
@@ -0,0 +1,53 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package matcher
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
// TestWithCache checks cached results agree on repeated lookups for both
// Match and MatchString.
func TestWithCache(t *testing.T) {
	regMatcher, _ := NewRegExpMatcher("[0-9]+")
	cached := WithCache(regMatcher)

	assert.True(t, cached.MatchString("1"))
	assert.True(t, cached.MatchString("1"))
	assert.True(t, cached.Match([]byte("2")))
	assert.True(t, cached.Match([]byte("2")))
}
+
// TestWithCache_specialCase checks the constant matchers are returned as-is.
func TestWithCache_specialCase(t *testing.T) {
	assert.Equal(t, TRUE(), WithCache(TRUE()))
	assert.Equal(t, FALSE(), WithCache(FALSE()))
}
+func BenchmarkCachedMatcher_MatchString_cache_hit(b *testing.B) {
+ benchmarks := []struct {
+ name string
+ expr string
+ target string
+ }{
+ {"stringFullMatcher", "= abc123", "abc123"},
+ {"stringPrefixMatcher", "~ ^abc123", "abc123456"},
+ {"stringSuffixMatcher", "~ abc123$", "hello abc123"},
+ {"stringSuffixMatcher", "~ abc123", "hello abc123 world"},
+ {"globMatcher", "* abc*def", "abc12345678def"},
+ {"regexp", "~ [0-9]+", "1234567890"},
+ }
+ for _, bm := range benchmarks {
+ m := Must(Parse(bm.expr))
+ b.Run(bm.name+"_raw", func(b *testing.B) {
+ for i := 0; i < b.N; i++ {
+ m.MatchString(bm.target)
+ }
+ })
+ b.Run(bm.name+"_cache", func(b *testing.B) {
+ cached := WithCache(m)
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ cached.MatchString(bm.target)
+ }
+ })
+ }
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/matcher/doc.go b/src/go/collectors/go.d.plugin/pkg/matcher/doc.go
new file mode 100644
index 000000000..33b06988d
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/matcher/doc.go
@@ -0,0 +1,40 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+/*
+Package matcher implements various formats of string matcher.
+
+Supported Format
+
+ string
+ glob
+ regexp
+ simple patterns
+
+The string matcher reports whether the given value equals to the string ( use == ).
+
+The glob matcher reports whether the given value matches the wildcard pattern.
+The pattern syntax is:
+
+ pattern:
+ { term }
+ term:
+ '*' matches any sequence of characters
+ '?' matches any single character
+ '[' [ '^' ] { character-range } ']'
+ character class (must be non-empty)
+ c matches character c (c != '*', '?', '\\', '[')
+ '\\' c matches character c
+
+ character-range:
+ c matches character c (c != '\\', '-', ']')
+ '\\' c matches character c
+ lo '-' hi matches character c for lo <= c <= hi
+
+The regexp matcher reports whether the given value matches the RegExp pattern ( use regexp.Match ).
+The RegExp syntax is described at https://golang.org/pkg/regexp/syntax/.
+
+The simple patterns matcher reports whether the given value matches the simple patterns.
+Simple patterns are a custom format used in netdata;
+their syntax is described at https://docs.netdata.cloud/libnetdata/simple_pattern/.
+*/
+package matcher
diff --git a/src/go/collectors/go.d.plugin/pkg/matcher/doc_test.go b/src/go/collectors/go.d.plugin/pkg/matcher/doc_test.go
new file mode 100644
index 000000000..d04b39a54
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/matcher/doc_test.go
@@ -0,0 +1,49 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package matcher_test
+
+import "github.com/netdata/netdata/go/go.d.plugin/pkg/matcher"
+
// ExampleNew_string_format demonstrates exact (full-text) string matching.
func ExampleNew_string_format() {
	// create a string matcher, which perform full text match
	m, err := matcher.New(matcher.FmtString, "hello")
	if err != nil {
		panic(err)
	}
	m.MatchString("hello")       // => true
	m.MatchString("hello world") // => false
}
+
+func ExampleNew_glob_format() {
+ // create a glob matcher, which perform wildcard match
+ m, err := matcher.New(matcher.FmtString, "hello*")
+ if err != nil {
+ panic(err)
+ }
+ m.MatchString("hello") // => true
+ m.MatchString("hello world") // => true
+ m.MatchString("Hello world") // => false
+}
+
// ExampleNew_simple_patterns_format demonstrates netdata simple patterns
// (space-separated wildcard words with '!' negation).
func ExampleNew_simple_patterns_format() {
	// create a simple patterns matcher, which perform wildcard match
	m, err := matcher.New(matcher.FmtSimplePattern, "hello* !*world *")
	if err != nil {
		panic(err)
	}
	m.MatchString("hello")       // => true
	m.MatchString("hello world") // => true
	m.MatchString("Hello world") // => false
	m.MatchString("Hello world!") // => false
}
+
// ExampleNew_regexp_format demonstrates regular-expression matching.
func ExampleNew_regexp_format() {
	// create a regexp matcher, which performs a regular-expression match
	// (the original comment said "wildcard match", which was wrong)
	m, err := matcher.New(matcher.FmtRegExp, "[0-9]+")
	if err != nil {
		panic(err)
	}
	m.MatchString("1")  // => true
	m.MatchString("1a") // => true
	m.MatchString("a")  // => false
}
diff --git a/src/go/collectors/go.d.plugin/pkg/matcher/expr.go b/src/go/collectors/go.d.plugin/pkg/matcher/expr.go
new file mode 100644
index 000000000..f9155f761
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/matcher/expr.go
@@ -0,0 +1,62 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package matcher
+
+import (
+ "errors"
+ "fmt"
+)
+
type (
	// Expr is a matcher expression that can be compiled into a Matcher.
	Expr interface {
		Parse() (Matcher, error)
	}

	// SimpleExpr is a simple expression to describe the condition:
	// (includes[0].Match(v) || includes[1].Match(v) || ...) && !(excludes[0].Match(v) || excludes[1].Match(v) || ...)
	SimpleExpr struct {
		Includes []string `yaml:"includes" json:"includes"` // matcher expressions to include
		Excludes []string `yaml:"excludes" json:"excludes"` // matcher expressions to exclude
	}
)
+
var (
	// ErrEmptyExpr is returned by SimpleExpr.Parse when the expression has
	// neither includes nor excludes.
	ErrEmptyExpr = errors.New("empty expression")
)
+
// Empty returns true if both Includes and Excludes are empty.
// (Original comment ended mid-sentence: "You can't".)
func (s *SimpleExpr) Empty() bool {
	return len(s.Includes) == 0 && len(s.Excludes) == 0
}
+
+// Parse parses the given matchers in Includes and Excludes
+func (s *SimpleExpr) Parse() (Matcher, error) {
+ if len(s.Includes) == 0 && len(s.Excludes) == 0 {
+ return nil, ErrEmptyExpr
+ }
+ var (
+ includes = FALSE()
+ excludes = FALSE()
+ )
+ if len(s.Includes) > 0 {
+ for _, item := range s.Includes {
+ m, err := Parse(item)
+ if err != nil {
+ return nil, fmt.Errorf("parse matcher %q error: %v", item, err)
+ }
+ includes = Or(includes, m)
+ }
+ } else {
+ includes = TRUE()
+ }
+
+ for _, item := range s.Excludes {
+ m, err := Parse(item)
+ if err != nil {
+ return nil, fmt.Errorf("parse matcher %q error: %v", item, err)
+ }
+ excludes = Or(excludes, m)
+ }
+
+ return And(includes, Not(excludes)), nil
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/matcher/expr_test.go b/src/go/collectors/go.d.plugin/pkg/matcher/expr_test.go
new file mode 100644
index 000000000..93a183226
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/matcher/expr_test.go
@@ -0,0 +1,100 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package matcher
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
// TestSimpleExpr_none: an empty expression must fail with ErrEmptyExpr.
func TestSimpleExpr_none(t *testing.T) {
	expr := &SimpleExpr{}

	m, err := expr.Parse()
	assert.EqualError(t, err, ErrEmptyExpr.Error())
	assert.Nil(t, m)
}
+
// TestSimpleExpr_include: includes are OR-ed together.
func TestSimpleExpr_include(t *testing.T) {
	expr := &SimpleExpr{
		Includes: []string{
			"~ /api/",
			"~ .php$",
		},
	}

	m, err := expr.Parse()
	assert.NoError(t, err)

	assert.True(t, m.MatchString("/api/a.php"))
	assert.True(t, m.MatchString("/api/a.php2"))
	assert.True(t, m.MatchString("/api2/a.php"))
	assert.True(t, m.MatchString("/api/img.php"))
	// matches neither include
	assert.False(t, m.MatchString("/api2/img.php2"))
}
+
// TestSimpleExpr_exclude: with no includes, everything not excluded matches.
func TestSimpleExpr_exclude(t *testing.T) {
	expr := &SimpleExpr{
		Excludes: []string{
			"~ /api/img",
		},
	}

	m, err := expr.Parse()
	assert.NoError(t, err)

	assert.True(t, m.MatchString("/api/a.php"))
	assert.True(t, m.MatchString("/api/a.php2"))
	assert.True(t, m.MatchString("/api2/a.php"))
	assert.False(t, m.MatchString("/api/img.php"))
	assert.True(t, m.MatchString("/api2/img.php2"))
}
+
// TestSimpleExpr_both: excludes veto values matched by the includes.
func TestSimpleExpr_both(t *testing.T) {
	expr := &SimpleExpr{
		Includes: []string{
			"~ /api/",
			"~ .php$",
		},
		Excludes: []string{
			"~ /api/img",
		},
	}

	m, err := expr.Parse()
	assert.NoError(t, err)

	assert.True(t, m.MatchString("/api/a.php"))
	assert.True(t, m.MatchString("/api/a.php2"))
	assert.True(t, m.MatchString("/api2/a.php"))
	assert.False(t, m.MatchString("/api/img.php"))
	assert.False(t, m.MatchString("/api2/img.php2"))
}
+
// TestSimpleExpr_Parse_NG: a malformed matcher expression in either list
// makes Parse fail.
func TestSimpleExpr_Parse_NG(t *testing.T) {
	{
		expr := &SimpleExpr{
			Includes: []string{
				"~ (ab",
				"~ .php$",
			},
		}

		m, err := expr.Parse()
		assert.Error(t, err)
		assert.Nil(t, m)
	}
	{
		expr := &SimpleExpr{
			Excludes: []string{
				"~ (ab",
				"~ .php$",
			},
		}

		m, err := expr.Parse()
		assert.Error(t, err)
		assert.Nil(t, m)
	}
}
diff --git a/src/go/collectors/go.d.plugin/pkg/matcher/glob.go b/src/go/collectors/go.d.plugin/pkg/matcher/glob.go
new file mode 100644
index 000000000..726c94c45
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/matcher/glob.go
@@ -0,0 +1,265 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package matcher
+
+import (
+ "errors"
+ "path/filepath"
+ "regexp"
+ "unicode/utf8"
+)
+
+// globMatcher implements Matcher, it matches using glob (filepath.Match-like) syntax.
+type globMatcher string
+
+var (
+ errBadGlobPattern = errors.New("bad glob pattern")
+ erGlobPattern = regexp.MustCompile(`(?s)^(?:[*?]|\[\^?([^\\-\]]|\\.|.-.)+\]|\\.|[^\*\?\\\[])*$`)
+)
+
+// NewGlobMatcher creates a new matcher with glob format
+func NewGlobMatcher(expr string) (Matcher, error) {
+ switch expr {
+ case "":
+ return stringFullMatcher(""), nil
+ case "*":
+ return TRUE(), nil
+ }
+
+	// any string that passes this regexp check is a valid pattern
+ if !erGlobPattern.MatchString(expr) {
+ return nil, errBadGlobPattern
+ }
+
+ size := len(expr)
+ chars := []rune(expr)
+ startWith := true
+ endWith := true
+ startIdx := 0
+ endIdx := size - 1
+ if chars[startIdx] == '*' {
+ startWith = false
+ startIdx = 1
+ }
+ if chars[endIdx] == '*' {
+ endWith = false
+ endIdx--
+ }
+
+ unescapedExpr := make([]rune, 0, endIdx-startIdx+1)
+ for i := startIdx; i <= endIdx; i++ {
+ ch := chars[i]
+ if ch == '\\' {
+ nextCh := chars[i+1]
+ unescapedExpr = append(unescapedExpr, nextCh)
+ i++
+ } else if isGlobMeta(ch) {
+ return globMatcher(expr), nil
+ } else {
+ unescapedExpr = append(unescapedExpr, ch)
+ }
+ }
+
+ return NewStringMatcher(string(unescapedExpr), startWith, endWith)
+}
+
+func isGlobMeta(ch rune) bool {
+ switch ch {
+ case '*', '?', '[':
+ return true
+ default:
+ return false
+ }
+}
+
+// Match matches.
+func (m globMatcher) Match(b []byte) bool {
+ return m.MatchString(string(b))
+}
+
+// MatchString matches.
+func (m globMatcher) MatchString(line string) bool {
+ rs, _ := m.globMatch(line)
+ return rs
+}
+
+func (m globMatcher) globMatch(name string) (matched bool, err error) {
+ pattern := string(m)
+Pattern:
+ for len(pattern) > 0 {
+ var star bool
+ var chunk string
+ star, chunk, pattern = scanChunk(pattern)
+ if star && chunk == "" {
+ // Trailing * matches rest of string unless it has a /.
+ // return !strings.Contains(name, string(Separator)), nil
+
+ return true, nil
+ }
+ // Look for match at current position.
+ t, ok, err := matchChunk(chunk, name)
+ // if we're the last chunk, make sure we've exhausted the name
+ // otherwise we'll give a false result even if we could still match
+ // using the star
+ if ok && (len(t) == 0 || len(pattern) > 0) {
+ name = t
+ continue
+ }
+ if err != nil {
+ return false, err
+ }
+ if star {
+ // Look for match skipping i+1 bytes.
+ // Cannot skip /.
+ for i := 0; i < len(name); i++ {
+ //for i := 0; i < len(name) && name[i] != Separator; i++ {
+ t, ok, err := matchChunk(chunk, name[i+1:])
+ if ok {
+ // if we're the last chunk, make sure we exhausted the name
+ if len(pattern) == 0 && len(t) > 0 {
+ continue
+ }
+ name = t
+ continue Pattern
+ }
+ if err != nil {
+ return false, err
+ }
+ }
+ }
+ return false, nil
+ }
+ return len(name) == 0, nil
+}
+
+// scanChunk gets the next segment of pattern, which is a non-star string
+// possibly preceded by a star.
+func scanChunk(pattern string) (star bool, chunk, rest string) {
+ for len(pattern) > 0 && pattern[0] == '*' {
+ pattern = pattern[1:]
+ star = true
+ }
+ inrange := false
+ var i int
+Scan:
+ for i = 0; i < len(pattern); i++ {
+ switch pattern[i] {
+ case '\\':
+ if i+1 < len(pattern) {
+ i++
+ }
+ case '[':
+ inrange = true
+ case ']':
+ inrange = false
+ case '*':
+ if !inrange {
+ break Scan
+ }
+ }
+ }
+ return star, pattern[0:i], pattern[i:]
+}
+
+// matchChunk checks whether chunk matches the beginning of s.
+// If so, it returns the remainder of s (after the match).
+// Chunk is all single-character operators: literals, char classes, and ?.
+func matchChunk(chunk, s string) (rest string, ok bool, err error) {
+ for len(chunk) > 0 {
+ if len(s) == 0 {
+ return
+ }
+ switch chunk[0] {
+ case '[':
+ // character class
+ r, n := utf8.DecodeRuneInString(s)
+ s = s[n:]
+ chunk = chunk[1:]
+ // We can't end right after '[', we're expecting at least
+ // a closing bracket and possibly a caret.
+ if len(chunk) == 0 {
+ err = filepath.ErrBadPattern
+ return
+ }
+ // possibly negated
+ negated := chunk[0] == '^'
+ if negated {
+ chunk = chunk[1:]
+ }
+ // parse all ranges
+ match := false
+ nrange := 0
+ for {
+ if len(chunk) > 0 && chunk[0] == ']' && nrange > 0 {
+ chunk = chunk[1:]
+ break
+ }
+ var lo, hi rune
+ if lo, chunk, err = getEsc(chunk); err != nil {
+ return
+ }
+ hi = lo
+ if chunk[0] == '-' {
+ if hi, chunk, err = getEsc(chunk[1:]); err != nil {
+ return
+ }
+ }
+ if lo <= r && r <= hi {
+ match = true
+ }
+ nrange++
+ }
+ if match == negated {
+ return
+ }
+
+ case '?':
+ //if s[0] == Separator {
+ // return
+ //}
+ _, n := utf8.DecodeRuneInString(s)
+ s = s[n:]
+ chunk = chunk[1:]
+
+ case '\\':
+ chunk = chunk[1:]
+ if len(chunk) == 0 {
+ err = filepath.ErrBadPattern
+ return
+ }
+ fallthrough
+
+ default:
+ if chunk[0] != s[0] {
+ return
+ }
+ s = s[1:]
+ chunk = chunk[1:]
+ }
+ }
+ return s, true, nil
+}
+
+// getEsc gets a possibly-escaped character from chunk, for a character class.
+func getEsc(chunk string) (r rune, nchunk string, err error) {
+ if len(chunk) == 0 || chunk[0] == '-' || chunk[0] == ']' {
+ err = filepath.ErrBadPattern
+ return
+ }
+ if chunk[0] == '\\' {
+ chunk = chunk[1:]
+ if len(chunk) == 0 {
+ err = filepath.ErrBadPattern
+ return
+ }
+ }
+ r, n := utf8.DecodeRuneInString(chunk)
+ if r == utf8.RuneError && n == 1 {
+ err = filepath.ErrBadPattern
+ }
+ nchunk = chunk[n:]
+ if len(nchunk) == 0 {
+ err = filepath.ErrBadPattern
+ }
+ return
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/matcher/glob_test.go b/src/go/collectors/go.d.plugin/pkg/matcher/glob_test.go
new file mode 100644
index 000000000..09d456105
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/matcher/glob_test.go
@@ -0,0 +1,97 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package matcher
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestNewGlobMatcher(t *testing.T) {
+ cases := []struct {
+ expr string
+ matcher Matcher
+ }{
+ {"", stringFullMatcher("")},
+ {"a", stringFullMatcher("a")},
+ {"a*b", globMatcher("a*b")},
+ {`a*\b`, globMatcher(`a*\b`)},
+ {`a\[`, stringFullMatcher(`a[`)},
+ {`ab\`, nil},
+ {`ab[`, nil},
+ {`ab]`, stringFullMatcher("ab]")},
+ }
+ for _, c := range cases {
+ t.Run(c.expr, func(t *testing.T) {
+ m, err := NewGlobMatcher(c.expr)
+ if c.matcher != nil {
+ assert.NoError(t, err)
+ assert.Equal(t, c.matcher, m)
+ } else {
+ assert.Error(t, err)
+ }
+ })
+ }
+}
+
+func TestGlobMatcher_MatchString(t *testing.T) {
+
+ cases := []struct {
+ expected bool
+ expr string
+ line string
+ }{
+ {true, "/a/*/d", "/a/b/c/d"},
+ {true, "foo*", "foo123"},
+ {true, "*foo*", "123foo123"},
+ {true, "*foo", "123foo"},
+ {true, "foo*bar", "foobar"},
+ {true, "foo*bar", "foo baz bar"},
+ {true, "a[bc]d", "abd"},
+ {true, "a[^bc]d", "add"},
+ {true, "a??d", "abcd"},
+ {true, `a\??d`, "a?cd"},
+ {true, "a[b-z]d", "abd"},
+ {false, "/a/*/d", "a/b/c/d"},
+ {false, "/a/*/d", "This will fail!"},
+ }
+
+ for _, c := range cases {
+ t.Run(c.line, func(t *testing.T) {
+ m := globMatcher(c.expr)
+ assert.Equal(t, c.expected, m.Match([]byte(c.line)))
+ assert.Equal(t, c.expected, m.MatchString(c.line))
+ })
+ }
+}
+
+func BenchmarkGlob_MatchString(b *testing.B) {
+ benchmarks := []struct {
+ expr string
+ test string
+ }{
+ {"", ""},
+ {"abc", "abcd"},
+ {"*abc", "abcd"},
+ {"abc*", "abcd"},
+ {"*abc*", "abcd"},
+ {"[a-z]", "abcd"},
+ }
+ for _, bm := range benchmarks {
+ b.Run(bm.expr+"_raw", func(b *testing.B) {
+ m := globMatcher(bm.expr)
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ m.MatchString(bm.test)
+ }
+ })
+ b.Run(bm.expr+"_optimized", func(b *testing.B) {
+ m, _ := NewGlobMatcher(bm.expr)
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ m.MatchString(bm.test)
+ }
+ })
+ }
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/matcher/logical.go b/src/go/collectors/go.d.plugin/pkg/matcher/logical.go
new file mode 100644
index 000000000..af07be8f4
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/matcher/logical.go
@@ -0,0 +1,101 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package matcher
+
+type (
+ trueMatcher struct{}
+ falseMatcher struct{}
+ andMatcher struct{ lhs, rhs Matcher }
+ orMatcher struct{ lhs, rhs Matcher }
+ negMatcher struct{ Matcher }
+)
+
+var (
+ matcherT trueMatcher
+ matcherF falseMatcher
+)
+
+// TRUE returns a matcher which always returns true
+func TRUE() Matcher {
+ return matcherT
+}
+
+// FALSE returns a matcher which always returns false
+func FALSE() Matcher {
+ return matcherF
+}
+
+// Not returns a matcher which negates the sub-matcher's result
+func Not(m Matcher) Matcher {
+ switch m {
+ case TRUE():
+ return FALSE()
+ case FALSE():
+ return TRUE()
+ default:
+ return negMatcher{m}
+ }
+}
+
+// And returns a matcher which returns true only if all of its sub-matchers return true
+func And(lhs, rhs Matcher, others ...Matcher) Matcher {
+ var matcher Matcher
+ switch lhs {
+ case TRUE():
+ matcher = rhs
+ case FALSE():
+ matcher = FALSE()
+ default:
+ switch rhs {
+ case TRUE():
+ matcher = lhs
+ case FALSE():
+ matcher = FALSE()
+ default:
+ matcher = andMatcher{lhs, rhs}
+ }
+ }
+ if len(others) > 0 {
+ return And(matcher, others[0], others[1:]...)
+ }
+ return matcher
+}
+
+// Or returns a matcher which returns true if any of its sub-matchers returns true
+func Or(lhs, rhs Matcher, others ...Matcher) Matcher {
+ var matcher Matcher
+ switch lhs {
+ case TRUE():
+ matcher = TRUE()
+ case FALSE():
+ matcher = rhs
+ default:
+ switch rhs {
+ case TRUE():
+ matcher = TRUE()
+ case FALSE():
+ matcher = lhs
+ default:
+ matcher = orMatcher{lhs, rhs}
+ }
+ }
+ if len(others) > 0 {
+ return Or(matcher, others[0], others[1:]...)
+ }
+ return matcher
+}
+
+func (trueMatcher) Match(_ []byte) bool { return true }
+func (trueMatcher) MatchString(_ string) bool { return true }
+
+func (falseMatcher) Match(_ []byte) bool { return false }
+func (falseMatcher) MatchString(_ string) bool { return false }
+
+func (m andMatcher) Match(b []byte) bool { return m.lhs.Match(b) && m.rhs.Match(b) }
+func (m andMatcher) MatchString(s string) bool { return m.lhs.MatchString(s) && m.rhs.MatchString(s) }
+
+func (m orMatcher) Match(b []byte) bool { return m.lhs.Match(b) || m.rhs.Match(b) }
+func (m orMatcher) MatchString(s string) bool { return m.lhs.MatchString(s) || m.rhs.MatchString(s) }
+
+func (m negMatcher) Match(b []byte) bool { return !m.Matcher.Match(b) }
+func (m negMatcher) MatchString(s string) bool { return !m.Matcher.MatchString(s) }
diff --git a/src/go/collectors/go.d.plugin/pkg/matcher/logical_test.go b/src/go/collectors/go.d.plugin/pkg/matcher/logical_test.go
new file mode 100644
index 000000000..64491f1ad
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/matcher/logical_test.go
@@ -0,0 +1,97 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package matcher
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestTRUE(t *testing.T) {
+ assert.True(t, TRUE().Match(nil))
+ assert.True(t, TRUE().MatchString(""))
+}
+
+func TestFALSE(t *testing.T) {
+ assert.False(t, FALSE().Match(nil))
+ assert.False(t, FALSE().MatchString(""))
+}
+
+func TestAnd(t *testing.T) {
+ assert.Equal(t,
+ matcherF,
+ And(FALSE(), stringFullMatcher("")))
+ assert.Equal(t,
+ matcherF,
+ And(stringFullMatcher(""), FALSE()))
+
+ assert.Equal(t,
+ stringFullMatcher(""),
+ And(TRUE(), stringFullMatcher("")))
+ assert.Equal(t,
+ stringFullMatcher(""),
+ And(stringFullMatcher(""), TRUE()))
+
+ assert.Equal(t,
+ andMatcher{stringPartialMatcher("a"), stringPartialMatcher("b")},
+ And(stringPartialMatcher("a"), stringPartialMatcher("b")))
+
+ assert.Equal(t,
+ andMatcher{
+ andMatcher{stringPartialMatcher("a"), stringPartialMatcher("b")},
+ stringPartialMatcher("c"),
+ },
+ And(stringPartialMatcher("a"), stringPartialMatcher("b"), stringPartialMatcher("c")))
+}
+
+func TestOr(t *testing.T) {
+ assert.Equal(t,
+ stringFullMatcher(""),
+ Or(FALSE(), stringFullMatcher("")))
+ assert.Equal(t,
+ stringFullMatcher(""),
+ Or(stringFullMatcher(""), FALSE()))
+
+ assert.Equal(t,
+ TRUE(),
+ Or(TRUE(), stringFullMatcher("")))
+ assert.Equal(t,
+ TRUE(),
+ Or(stringFullMatcher(""), TRUE()))
+
+ assert.Equal(t,
+ orMatcher{stringPartialMatcher("a"), stringPartialMatcher("b")},
+ Or(stringPartialMatcher("a"), stringPartialMatcher("b")))
+
+ assert.Equal(t,
+ orMatcher{
+ orMatcher{stringPartialMatcher("a"), stringPartialMatcher("b")},
+ stringPartialMatcher("c"),
+ },
+ Or(stringPartialMatcher("a"), stringPartialMatcher("b"), stringPartialMatcher("c")))
+}
+
+func TestAndMatcher_Match(t *testing.T) {
+ and := andMatcher{
+ stringPrefixMatcher("a"),
+ stringSuffixMatcher("c"),
+ }
+ assert.True(t, and.Match([]byte("abc")))
+ assert.True(t, and.MatchString("abc"))
+}
+
+func TestOrMatcher_Match(t *testing.T) {
+ or := orMatcher{
+ stringPrefixMatcher("a"),
+ stringPrefixMatcher("c"),
+ }
+ assert.True(t, or.Match([]byte("aaa")))
+ assert.True(t, or.MatchString("ccc"))
+}
+
+func TestNegMatcher_Match(t *testing.T) {
+ neg := negMatcher{stringPrefixMatcher("a")}
+ assert.False(t, neg.Match([]byte("aaa")))
+ assert.True(t, neg.MatchString("ccc"))
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/matcher/matcher.go b/src/go/collectors/go.d.plugin/pkg/matcher/matcher.go
new file mode 100644
index 000000000..76d903325
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/matcher/matcher.go
@@ -0,0 +1,149 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package matcher
+
+import (
+ "errors"
+ "fmt"
+ "regexp"
+)
+
+type (
+ // Matcher is an interface that wraps MatchString method.
+ Matcher interface {
+ // Match performs match against given []byte
+ Match(b []byte) bool
+ // MatchString performs match against given string
+ MatchString(string) bool
+ }
+
+ // Format matcher format
+ Format string
+)
+
+const (
+ // FmtString is a string match format.
+ FmtString Format = "string"
+ // FmtGlob is a glob match format.
+ FmtGlob Format = "glob"
+ // FmtRegExp is a regex match format.
+ FmtRegExp Format = "regexp"
+ // FmtSimplePattern is a simple pattern match format
+ // https://docs.netdata.cloud/libnetdata/simple_pattern/
+ FmtSimplePattern Format = "simple_patterns"
+
+ // Separator is a separator between match format and expression.
+ Separator = ":"
+)
+
+const (
+ symString = "="
+ symGlob = "*"
+ symRegExp = "~"
+)
+
+var (
+ reShortSyntax = regexp.MustCompile(`(?s)^(!)?(.)\s*(.*)$`)
+ reLongSyntax = regexp.MustCompile(`(?s)^(!)?([^:]+):(.*)$`)
+
+ errNotShortSyntax = errors.New("not short syntax")
+)
+
+// Must is a helper that wraps a call to a function returning (Matcher, error) and panics if the error is non-nil.
+// It is intended for use in variable initializations such as
+//
+// var m = matcher.Must(matcher.New(matcher.FmtString, "hello world"))
+func Must(m Matcher, err error) Matcher {
+ if err != nil {
+ panic(err)
+ }
+ return m
+}
+
+// New creates a matcher with the given format and expression
+func New(format Format, expr string) (Matcher, error) {
+ switch format {
+ case FmtString:
+ return NewStringMatcher(expr, true, true)
+ case FmtGlob:
+ return NewGlobMatcher(expr)
+ case FmtRegExp:
+ return NewRegExpMatcher(expr)
+ case FmtSimplePattern:
+ return NewSimplePatternsMatcher(expr)
+ default:
+ return nil, fmt.Errorf("unsupported matcher format: '%s'", format)
+ }
+}
+
+// Parse parses line and returns appropriate matcher based on matched format.
+//
+// Short Syntax
+//
+// <line> ::= [ <not> ] <format> <space> <expr>
+// <not> ::= '!'
+// negative expression
+// <format> ::= [ '=', '~', '*' ]
+// '=' means string match
+// '~' means regexp match
+// '*' means glob match
+// <space> ::= { ' ' | '\t' | '\n' | '\f' | '\r' }
+// <expr> ::= any string
+//
+// Long Syntax
+//
+// <line> ::= [ <not> ] <format> <separator> <expr>
+// <format> ::= [ 'string' | 'glob' | 'regexp' | 'simple_patterns' ]
+// <not> ::= '!'
+// negative expression
+// <separator> ::= ':'
+// <expr> ::= any string
+func Parse(line string) (Matcher, error) {
+ matcher, err := parseShortFormat(line)
+ if err == nil {
+ return matcher, nil
+ }
+ return parseLongSyntax(line)
+}
+
+func parseShortFormat(line string) (Matcher, error) {
+ m := reShortSyntax.FindStringSubmatch(line)
+ if m == nil {
+ return nil, errNotShortSyntax
+ }
+ var format Format
+ switch m[2] {
+ case symString:
+ format = FmtString
+ case symGlob:
+ format = FmtGlob
+ case symRegExp:
+ format = FmtRegExp
+ default:
+ return nil, fmt.Errorf("invalid short syntax: unknown symbol '%s'", m[2])
+ }
+ expr := m[3]
+ matcher, err := New(format, expr)
+ if err != nil {
+ return nil, err
+ }
+ if m[1] != "" {
+ matcher = Not(matcher)
+ }
+ return matcher, nil
+}
+
+func parseLongSyntax(line string) (Matcher, error) {
+ m := reLongSyntax.FindStringSubmatch(line)
+ if m == nil {
+ return nil, fmt.Errorf("invalid syntax")
+ }
+ matcher, err := New(Format(m[2]), m[3])
+ if err != nil {
+ return nil, err
+ }
+ if m[1] != "" {
+ matcher = Not(matcher)
+ }
+ return matcher, nil
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/matcher/matcher_test.go b/src/go/collectors/go.d.plugin/pkg/matcher/matcher_test.go
new file mode 100644
index 000000000..f304d983d
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/matcher/matcher_test.go
@@ -0,0 +1,122 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package matcher
+
+import (
+ "log"
+ "reflect"
+ "regexp"
+ "testing"
+
+ "github.com/stretchr/testify/require"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestParse(t *testing.T) {
+ tests := []struct {
+ valid bool
+ line string
+ matcher Matcher
+ }{
+ {false, "", nil},
+ {false, "abc", nil},
+ {false, `~ abc\`, nil},
+ {false, `invalid_fmt:abc`, nil},
+
+ {true, "=", stringFullMatcher("")},
+ {true, "= ", stringFullMatcher("")},
+ {true, "=full", stringFullMatcher("full")},
+ {true, "= full", stringFullMatcher("full")},
+ {true, "= \t\ffull", stringFullMatcher("full")},
+
+ {true, "string:", stringFullMatcher("")},
+ {true, "string:full", stringFullMatcher("full")},
+
+ {true, "!=", Not(stringFullMatcher(""))},
+ {true, "!=full", Not(stringFullMatcher("full"))},
+ {true, "!= full", Not(stringFullMatcher("full"))},
+ {true, "!= \t\ffull", Not(stringFullMatcher("full"))},
+
+ {true, "!string:", Not(stringFullMatcher(""))},
+ {true, "!string:full", Not(stringFullMatcher("full"))},
+
+ {true, "~", TRUE()},
+ {true, "~ ", TRUE()},
+ {true, `~ ^$`, stringFullMatcher("")},
+ {true, "~ partial", stringPartialMatcher("partial")},
+ {true, `~ part\.ial`, stringPartialMatcher("part.ial")},
+ {true, "~ ^prefix", stringPrefixMatcher("prefix")},
+ {true, "~ suffix$", stringSuffixMatcher("suffix")},
+ {true, "~ ^full$", stringFullMatcher("full")},
+ {true, "~ [0-9]+", regexp.MustCompile(`[0-9]+`)},
+ {true, `~ part\s1`, regexp.MustCompile(`part\s1`)},
+
+ {true, "!~", FALSE()},
+ {true, "!~ ", FALSE()},
+ {true, "!~ partial", Not(stringPartialMatcher("partial"))},
+ {true, `!~ part\.ial`, Not(stringPartialMatcher("part.ial"))},
+ {true, "!~ ^prefix", Not(stringPrefixMatcher("prefix"))},
+ {true, "!~ suffix$", Not(stringSuffixMatcher("suffix"))},
+ {true, "!~ ^full$", Not(stringFullMatcher("full"))},
+ {true, "!~ [0-9]+", Not(regexp.MustCompile(`[0-9]+`))},
+
+ {true, `regexp:partial`, stringPartialMatcher("partial")},
+ {true, `!regexp:partial`, Not(stringPartialMatcher("partial"))},
+
+ {true, `*`, stringFullMatcher("")},
+ {true, `* foo`, stringFullMatcher("foo")},
+ {true, `* foo*`, stringPrefixMatcher("foo")},
+ {true, `* *foo`, stringSuffixMatcher("foo")},
+ {true, `* *foo*`, stringPartialMatcher("foo")},
+ {true, `* foo*bar`, globMatcher("foo*bar")},
+ {true, `* *foo*bar`, globMatcher("*foo*bar")},
+ {true, `* foo?bar`, globMatcher("foo?bar")},
+
+ {true, `!*`, Not(stringFullMatcher(""))},
+ {true, `!* foo`, Not(stringFullMatcher("foo"))},
+ {true, `!* foo*`, Not(stringPrefixMatcher("foo"))},
+ {true, `!* *foo`, Not(stringSuffixMatcher("foo"))},
+ {true, `!* *foo*`, Not(stringPartialMatcher("foo"))},
+ {true, `!* foo*bar`, Not(globMatcher("foo*bar"))},
+ {true, `!* *foo*bar`, Not(globMatcher("*foo*bar"))},
+ {true, `!* foo?bar`, Not(globMatcher("foo?bar"))},
+
+ {true, "glob:foo*bar", globMatcher("foo*bar")},
+ {true, "!glob:foo*bar", Not(globMatcher("foo*bar"))},
+
+ {true, `simple_patterns:`, FALSE()},
+ {true, `simple_patterns: `, FALSE()},
+ {true, `simple_patterns: foo`, simplePatternsMatcher{
+ {stringFullMatcher("foo"), true},
+ }},
+ {true, `simple_patterns: !foo`, simplePatternsMatcher{
+ {stringFullMatcher("foo"), false},
+ }},
+ }
+ for _, test := range tests {
+ t.Run(test.line, func(t *testing.T) {
+ m, err := Parse(test.line)
+ if test.valid {
+ require.NoError(t, err)
+ if test.matcher != nil {
+ log.Printf("%s %#v", reflect.TypeOf(m).Name(), m)
+ assert.Equal(t, test.matcher, m)
+ }
+ } else {
+ assert.Error(t, err)
+ }
+ })
+ }
+}
+
+func TestMust(t *testing.T) {
+ assert.NotPanics(t, func() {
+ m := Must(New(FmtRegExp, `[0-9]+`))
+ assert.NotNil(t, m)
+ })
+
+ assert.Panics(t, func() {
+ Must(New(FmtRegExp, `[0-9]+\`))
+ })
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/matcher/regexp.go b/src/go/collectors/go.d.plugin/pkg/matcher/regexp.go
new file mode 100644
index 000000000..3a297f3b3
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/matcher/regexp.go
@@ -0,0 +1,60 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package matcher
+
+import "regexp"
+
+// NewRegExpMatcher creates a new matcher with RegExp format
+func NewRegExpMatcher(expr string) (Matcher, error) {
+ switch expr {
+ case "", "^", "$":
+ return TRUE(), nil
+ case "^$", "$^":
+ return NewStringMatcher("", true, true)
+ }
+ size := len(expr)
+ chars := []rune(expr)
+ var startWith, endWith bool
+ startIdx := 0
+ endIdx := size - 1
+ if chars[startIdx] == '^' {
+ startWith = true
+ startIdx = 1
+ }
+ if chars[endIdx] == '$' {
+ endWith = true
+ endIdx--
+ }
+
+ unescapedExpr := make([]rune, 0, endIdx-startIdx+1)
+ for i := startIdx; i <= endIdx; i++ {
+ ch := chars[i]
+ if ch == '\\' {
+ if i == endIdx { // end with '\' => invalid format
+ return regexp.Compile(expr)
+ }
+ nextCh := chars[i+1]
+			if !isRegExpMeta(nextCh) { // '\' + non-meta char => special meaning
+ return regexp.Compile(expr)
+ }
+ unescapedExpr = append(unescapedExpr, nextCh)
+ i++
+ } else if isRegExpMeta(ch) {
+ return regexp.Compile(expr)
+ } else {
+ unescapedExpr = append(unescapedExpr, ch)
+ }
+ }
+
+ return NewStringMatcher(string(unescapedExpr), startWith, endWith)
+}
+
+// isRegExpMeta reports whether rune b needs to be escaped by QuoteMeta.
+func isRegExpMeta(b rune) bool {
+ switch b {
+ case '\\', '.', '+', '*', '?', '(', ')', '|', '[', ']', '{', '}', '^', '$':
+ return true
+ default:
+ return false
+ }
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/matcher/regexp_test.go b/src/go/collectors/go.d.plugin/pkg/matcher/regexp_test.go
new file mode 100644
index 000000000..fe644747b
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/matcher/regexp_test.go
@@ -0,0 +1,66 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package matcher
+
+import (
+ "regexp"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestRegExpMatch_Match(t *testing.T) {
+ m := regexp.MustCompile("[0-9]+")
+
+ cases := []struct {
+ expected bool
+ line string
+ }{
+ {
+ expected: true,
+ line: "2019",
+ },
+ {
+ expected: true,
+ line: "It's over 9000!",
+ },
+ {
+ expected: false,
+ line: "This will never fail!",
+ },
+ }
+
+ for _, c := range cases {
+ assert.Equal(t, c.expected, m.MatchString(c.line))
+ }
+}
+
+func BenchmarkRegExp_MatchString(b *testing.B) {
+ benchmarks := []struct {
+ expr string
+ test string
+ }{
+ {"", ""},
+ {"abc", "abcd"},
+ {"^abc", "abcd"},
+ {"abc$", "abcd"},
+ {"^abc$", "abcd"},
+ {"[a-z]+", "abcd"},
+ }
+ for _, bm := range benchmarks {
+ b.Run(bm.expr+"_raw", func(b *testing.B) {
+ m := regexp.MustCompile(bm.expr)
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ m.MatchString(bm.test)
+ }
+ })
+ b.Run(bm.expr+"_optimized", func(b *testing.B) {
+ m, _ := NewRegExpMatcher(bm.expr)
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ m.MatchString(bm.test)
+ }
+ })
+ }
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/matcher/simple_patterns.go b/src/go/collectors/go.d.plugin/pkg/matcher/simple_patterns.go
new file mode 100644
index 000000000..0c1d69fc6
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/matcher/simple_patterns.go
@@ -0,0 +1,65 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package matcher
+
+import (
+ "strings"
+)
+
+type (
+ simplePatternTerm struct {
+ matcher Matcher
+ positive bool
+ }
+
+ // simplePatternsMatcher patterns.
+ simplePatternsMatcher []simplePatternTerm
+)
+
+// NewSimplePatternsMatcher creates a new simple patterns matcher. It returns an error if any of the patterns has bad syntax.
+func NewSimplePatternsMatcher(expr string) (Matcher, error) {
+ var ps simplePatternsMatcher
+
+ for _, pattern := range strings.Fields(expr) {
+ if err := ps.add(pattern); err != nil {
+ return nil, err
+ }
+ }
+ if len(ps) == 0 {
+ return FALSE(), nil
+ }
+ return ps, nil
+}
+
+func (m *simplePatternsMatcher) add(term string) error {
+ p := simplePatternTerm{}
+ if term[0] == '!' {
+ p.positive = false
+ term = term[1:]
+ } else {
+ p.positive = true
+ }
+ matcher, err := NewGlobMatcher(term)
+ if err != nil {
+ return err
+ }
+
+ p.matcher = matcher
+ *m = append(*m, p)
+
+ return nil
+}
+
+func (m simplePatternsMatcher) Match(b []byte) bool {
+ return m.MatchString(string(b))
+}
+
+// MatchString matches.
+func (m simplePatternsMatcher) MatchString(line string) bool {
+ for _, p := range m {
+ if p.matcher.MatchString(line) {
+ return p.positive
+ }
+ }
+ return false
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/matcher/simple_patterns_test.go b/src/go/collectors/go.d.plugin/pkg/matcher/simple_patterns_test.go
new file mode 100644
index 000000000..016096d57
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/matcher/simple_patterns_test.go
@@ -0,0 +1,88 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package matcher
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestNewSimplePatternsMatcher(t *testing.T) {
+ tests := []struct {
+ expr string
+ expected Matcher
+ }{
+ {"", FALSE()},
+ {" ", FALSE()},
+ {"foo", simplePatternsMatcher{
+ {stringFullMatcher("foo"), true},
+ }},
+ {"!foo", simplePatternsMatcher{
+ {stringFullMatcher("foo"), false},
+ }},
+ {"foo bar", simplePatternsMatcher{
+ {stringFullMatcher("foo"), true},
+ {stringFullMatcher("bar"), true},
+ }},
+ {"*foobar* !foo* !*bar *", simplePatternsMatcher{
+ {stringPartialMatcher("foobar"), true},
+ {stringPrefixMatcher("foo"), false},
+ {stringSuffixMatcher("bar"), false},
+ {TRUE(), true},
+ }},
+ {`ab\`, nil},
+ }
+ for _, test := range tests {
+ t.Run(test.expr, func(t *testing.T) {
+ matcher, err := NewSimplePatternsMatcher(test.expr)
+ if test.expected == nil {
+ assert.Error(t, err)
+ } else {
+ assert.Equal(t, test.expected, matcher)
+ }
+ })
+ }
+}
+
+func TestSimplePatterns_Match(t *testing.T) {
+ m, err := NewSimplePatternsMatcher("*foobar* !foo* !*bar *")
+
+ require.NoError(t, err)
+
+ cases := []struct {
+ expected bool
+ line string
+ }{
+ {
+ expected: true,
+ line: "hello world",
+ },
+ {
+ expected: false,
+ line: "hello world bar",
+ },
+ {
+ expected: true,
+ line: "hello world foobar",
+ },
+ }
+
+ for _, c := range cases {
+ t.Run(c.line, func(t *testing.T) {
+ assert.Equal(t, c.expected, m.MatchString(c.line))
+ assert.Equal(t, c.expected, m.Match([]byte(c.line)))
+ })
+ }
+}
+
+func TestSimplePatterns_Match2(t *testing.T) {
+ m, err := NewSimplePatternsMatcher("*foobar")
+
+ require.NoError(t, err)
+
+ assert.True(t, m.MatchString("foobar"))
+ assert.True(t, m.MatchString("foo foobar"))
+ assert.False(t, m.MatchString("foobar baz"))
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/matcher/string.go b/src/go/collectors/go.d.plugin/pkg/matcher/string.go
new file mode 100644
index 000000000..25827d0d8
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/matcher/string.go
@@ -0,0 +1,48 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package matcher
+
+import (
+ "bytes"
+ "strings"
+)
+
+type (
+ // stringFullMatcher implements Matcher, it uses "==" to match.
+ stringFullMatcher string
+
+ // stringPartialMatcher implements Matcher, it uses strings.Contains to match.
+ stringPartialMatcher string
+
+ // stringPrefixMatcher implements Matcher, it uses strings.HasPrefix to match.
+ stringPrefixMatcher string
+
+ // stringSuffixMatcher implements Matcher, it uses strings.HasSuffix to match.
+ stringSuffixMatcher string
+)
+
+// NewStringMatcher create a new matcher with string format
+func NewStringMatcher(s string, startWith, endWith bool) (Matcher, error) {
+ switch {
+ case startWith && endWith:
+ return stringFullMatcher(s), nil
+ case startWith && !endWith:
+ return stringPrefixMatcher(s), nil
+ case !startWith && endWith:
+ return stringSuffixMatcher(s), nil
+ default:
+ return stringPartialMatcher(s), nil
+ }
+}
+
+func (m stringFullMatcher) Match(b []byte) bool { return string(m) == string(b) }
+func (m stringFullMatcher) MatchString(line string) bool { return string(m) == line }
+
+func (m stringPartialMatcher) Match(b []byte) bool { return bytes.Contains(b, []byte(m)) }
+func (m stringPartialMatcher) MatchString(line string) bool { return strings.Contains(line, string(m)) }
+
+func (m stringPrefixMatcher) Match(b []byte) bool { return bytes.HasPrefix(b, []byte(m)) }
+func (m stringPrefixMatcher) MatchString(line string) bool { return strings.HasPrefix(line, string(m)) }
+
+func (m stringSuffixMatcher) Match(b []byte) bool { return bytes.HasSuffix(b, []byte(m)) }
+func (m stringSuffixMatcher) MatchString(line string) bool { return strings.HasSuffix(line, string(m)) }
diff --git a/src/go/collectors/go.d.plugin/pkg/matcher/string_test.go b/src/go/collectors/go.d.plugin/pkg/matcher/string_test.go
new file mode 100644
index 000000000..1694efbd0
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/matcher/string_test.go
@@ -0,0 +1,62 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package matcher
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+var stringMatcherTestCases = []struct {
+ line string
+ expr string
+ full, prefix, suffix, partial bool
+}{
+ {"", "", true, true, true, true},
+ {"abc", "", false, true, true, true},
+ {"power", "pow", false, true, false, true},
+ {"netdata", "data", false, false, true, true},
+ {"abc", "def", false, false, false, false},
+ {"soon", "o", false, false, false, true},
+}
+
+func TestStringFullMatcher_MatchString(t *testing.T) {
+ for _, c := range stringMatcherTestCases {
+ t.Run(c.line, func(t *testing.T) {
+ m := stringFullMatcher(c.expr)
+ assert.Equal(t, c.full, m.Match([]byte(c.line)))
+ assert.Equal(t, c.full, m.MatchString(c.line))
+ })
+ }
+}
+
+func TestStringPrefixMatcher_MatchString(t *testing.T) {
+ for _, c := range stringMatcherTestCases {
+ t.Run(c.line, func(t *testing.T) {
+ m := stringPrefixMatcher(c.expr)
+ assert.Equal(t, c.prefix, m.Match([]byte(c.line)))
+ assert.Equal(t, c.prefix, m.MatchString(c.line))
+ })
+ }
+}
+
+func TestStringSuffixMatcher_MatchString(t *testing.T) {
+ for _, c := range stringMatcherTestCases {
+ t.Run(c.line, func(t *testing.T) {
+ m := stringSuffixMatcher(c.expr)
+ assert.Equal(t, c.suffix, m.Match([]byte(c.line)))
+ assert.Equal(t, c.suffix, m.MatchString(c.line))
+ })
+ }
+}
+
+func TestStringPartialMatcher_MatchString(t *testing.T) {
+ for _, c := range stringMatcherTestCases {
+ t.Run(c.line, func(t *testing.T) {
+ m := stringPartialMatcher(c.expr)
+ assert.Equal(t, c.partial, m.Match([]byte(c.line)))
+ assert.Equal(t, c.partial, m.MatchString(c.line))
+ })
+ }
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/metrics/counter.go b/src/go/collectors/go.d.plugin/pkg/metrics/counter.go
new file mode 100644
index 000000000..7231fc7a4
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/metrics/counter.go
@@ -0,0 +1,93 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package metrics
+
+import (
+ "errors"
+
+ "github.com/netdata/netdata/go/go.d.plugin/pkg/stm"
+)
+
+type (
+ // Counter is a Metric that represents a single numerical value that only ever
+ // goes up. That implies that it cannot be used to count items whose number can
+ // also go down, e.g. the number of currently running goroutines. Those
+ // "counters" are represented by Gauges.
+ //
+ // A Counter is typically used to count requests served, tasks completed, errors
+ // occurred, etc.
+ Counter struct {
+ valInt int64
+ valFloat float64
+ }
+
+ // CounterVec is a Collector that bundles a set of Counters which have different values for their names.
+ // This is used if you want to count the same thing partitioned by various dimensions
+ // (e.g. number of HTTP requests, partitioned by response code and method).
+ //
+ // Create instances with NewCounterVec.
+ CounterVec map[string]*Counter
+)
+
+var (
+ _ stm.Value = Counter{}
+ _ stm.Value = CounterVec{}
+)
+
+// WriteTo writes its value into given map.
+func (c Counter) WriteTo(rv map[string]int64, key string, mul, div int) {
+ rv[key] = int64(c.Value() * float64(mul) / float64(div))
+}
+
+// Value gets current counter.
+func (c Counter) Value() float64 {
+ return float64(c.valInt) + c.valFloat
+}
+
+// Inc increments the counter by 1. Use Add to increment it by arbitrary
+// non-negative values.
+func (c *Counter) Inc() {
+ c.valInt++
+}
+
+// Add adds the given value to the counter. It panics if the value is < 0.
+func (c *Counter) Add(v float64) {
+ if v < 0 {
+ panic(errors.New("counter cannot decrease in value"))
+ }
+ val := int64(v)
+ if float64(val) == v {
+ c.valInt += val
+ return
+ }
+ c.valFloat += v
+}
+
+// NewCounterVec creates a new CounterVec
+func NewCounterVec() CounterVec {
+ return CounterVec{}
+}
+
+// WriteTo writes its value into given map.
+func (c CounterVec) WriteTo(rv map[string]int64, key string, mul, div int) {
+ for name, value := range c {
+ rv[key+"_"+name] = int64(value.Value() * float64(mul) / float64(div))
+ }
+}
+
+// Get gets counter instance by name
+func (c CounterVec) Get(name string) *Counter {
+ item, _ := c.GetP(name)
+ return item
+}
+
+// GetP gets counter instance by name
+func (c CounterVec) GetP(name string) (counter *Counter, ok bool) {
+ counter, ok = c[name]
+ if ok {
+ return
+ }
+ counter = &Counter{}
+ c[name] = counter
+ return
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/metrics/counter_test.go b/src/go/collectors/go.d.plugin/pkg/metrics/counter_test.go
new file mode 100644
index 000000000..61f50501a
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/metrics/counter_test.go
@@ -0,0 +1,105 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package metrics
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestCounter_WriteTo(t *testing.T) {
+ c := Counter{}
+ c.Inc()
+ c.Inc()
+ c.Inc()
+ c.Add(0.14)
+ m := map[string]int64{}
+ c.WriteTo(m, "pi", 100, 1)
+ assert.Len(t, m, 1)
+ assert.EqualValues(t, 314, m["pi"])
+}
+
+func TestCounterVec_WriteTo(t *testing.T) {
+ c := NewCounterVec()
+ c.Get("foo").Inc()
+ c.Get("foo").Inc()
+ c.Get("bar").Inc()
+ c.Get("bar").Add(0.14)
+
+ m := map[string]int64{}
+ c.WriteTo(m, "pi", 100, 1)
+ assert.Len(t, m, 2)
+ assert.EqualValues(t, 200, m["pi_foo"])
+ assert.EqualValues(t, 114, m["pi_bar"])
+}
+
+func TestCounter_Inc(t *testing.T) {
+ c := Counter{}
+ c.Inc()
+ assert.Equal(t, 1.0, c.Value())
+ c.Inc()
+ assert.Equal(t, 2.0, c.Value())
+}
+
+func TestCounter_Add(t *testing.T) {
+ c := Counter{}
+ c.Add(3.14)
+ assert.InDelta(t, 3.14, c.Value(), 0.0001)
+ c.Add(2)
+ assert.InDelta(t, 5.14, c.Value(), 0.0001)
+ assert.Panics(t, func() {
+ c.Add(-1)
+ })
+}
+
+func BenchmarkCounter_Add(b *testing.B) {
+ benchmarks := []struct {
+ name string
+ value float64
+ }{
+ {"int", 1},
+ {"float", 3.14},
+ }
+ for _, bm := range benchmarks {
+ b.Run(bm.name, func(b *testing.B) {
+ var c Counter
+ for i := 0; i < b.N; i++ {
+ c.Add(bm.value)
+ }
+ })
+ }
+}
+
+func BenchmarkCounter_Inc(b *testing.B) {
+ var c Counter
+ for i := 0; i < b.N; i++ {
+ c.Inc()
+ }
+}
+
+func BenchmarkCounterVec_Inc(b *testing.B) {
+ c := NewCounterVec()
+ for i := 0; i < b.N; i++ {
+ c.Get("foo").Inc()
+ }
+}
+
+func BenchmarkCounter_Value(b *testing.B) {
+ var c Counter
+ c.Inc()
+ c.Add(3.14)
+ for i := 0; i < b.N; i++ {
+ c.Value()
+ }
+}
+
+func BenchmarkCounter_WriteTo(b *testing.B) {
+ var c Counter
+ c.Inc()
+ c.Add(3.14)
+ m := map[string]int64{}
+ for i := 0; i < b.N; i++ {
+ c.WriteTo(m, "pi", 100, 1)
+ }
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/metrics/gauge.go b/src/go/collectors/go.d.plugin/pkg/metrics/gauge.go
new file mode 100644
index 000000000..6f0930f66
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/metrics/gauge.go
@@ -0,0 +1,103 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package metrics
+
+import (
+ "time"
+
+ "github.com/netdata/netdata/go/go.d.plugin/pkg/stm"
+)
+
+type (
+ // Gauge is a Metric that represents a single numerical value that can
+ // arbitrarily go up and down.
+ //
+ // A Gauge is typically used for measured values like temperatures or current
+ // memory usage, but also "counts" that can go up and down, like the number of
+ // running goroutines.
+ Gauge float64
+
+ // GaugeVec is a Collector that bundles a set of Gauges which have different values for their names.
+ // This is used if you want to count the same thing partitioned by various dimensions
+ //
+ // Create instances with NewGaugeVec.
+ GaugeVec map[string]*Gauge
+)
+
+var (
+ _ stm.Value = Gauge(0)
+ _ stm.Value = GaugeVec{}
+)
+
+// WriteTo writes its value into given map.
+func (g Gauge) WriteTo(rv map[string]int64, key string, mul, div int) {
+ rv[key] = int64(float64(g) * float64(mul) / float64(div))
+}
+
+// Value gets the current gauge value.
+func (g Gauge) Value() float64 {
+ return float64(g)
+}
+
+// Set sets the Gauge to an arbitrary value.
+func (g *Gauge) Set(v float64) {
+ *g = Gauge(v)
+}
+
+// Inc increments the Gauge by 1. Use Add to increment it by arbitrary
+// values.
+func (g *Gauge) Inc() {
+ *g++
+}
+
+// Dec decrements the Gauge by 1. Use Sub to decrement it by arbitrary
+// values.
+func (g *Gauge) Dec() {
+ *g--
+}
+
+// Add adds the given value to the Gauge. (The value can be negative,
+// resulting in a decrease of the Gauge.)
+func (g *Gauge) Add(delta float64) {
+ *g += Gauge(delta)
+}
+
+// Sub subtracts the given value from the Gauge. (The value can be
+// negative, resulting in an increase of the Gauge.)
+func (g *Gauge) Sub(delta float64) {
+ *g -= Gauge(delta)
+}
+
+// SetToCurrentTime sets the Gauge to the current Unix time in seconds.
+func (g *Gauge) SetToCurrentTime() {
+ *g = Gauge(time.Now().UnixNano()) / 1e9
+}
+
+// NewGaugeVec creates a new GaugeVec
+func NewGaugeVec() GaugeVec {
+ return GaugeVec{}
+}
+
+// WriteTo writes its value into given map.
+func (g GaugeVec) WriteTo(rv map[string]int64, key string, mul, div int) {
+ for name, value := range g {
+ rv[key+"_"+name] = int64(value.Value() * float64(mul) / float64(div))
+ }
+}
+
+// Get gets gauge instance by name
+func (g GaugeVec) Get(name string) *Gauge {
+ item, _ := g.GetP(name)
+ return item
+}
+
+// GetP gets gauge instance by name
+func (g GaugeVec) GetP(name string) (gauge *Gauge, ok bool) {
+ gauge, ok = g[name]
+ if ok {
+ return
+ }
+ gauge = new(Gauge)
+ g[name] = gauge
+ return
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/metrics/gauge_test.go b/src/go/collectors/go.d.plugin/pkg/metrics/gauge_test.go
new file mode 100644
index 000000000..8940e330e
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/metrics/gauge_test.go
@@ -0,0 +1,129 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package metrics
+
+import (
+ "testing"
+ "time"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestGauge_Set(t *testing.T) {
+ var g Gauge
+ assert.Equal(t, 0.0, g.Value())
+ g.Set(100)
+ assert.Equal(t, 100.0, g.Value())
+ g.Set(200)
+ assert.Equal(t, 200.0, g.Value())
+}
+
+func TestGauge_Add(t *testing.T) {
+ var g Gauge
+ assert.Equal(t, 0.0, g.Value())
+ g.Add(100)
+ assert.Equal(t, 100.0, g.Value())
+ g.Add(200)
+ assert.Equal(t, 300.0, g.Value())
+}
+func TestGauge_Sub(t *testing.T) {
+ var g Gauge
+ assert.Equal(t, 0.0, g.Value())
+ g.Sub(100)
+ assert.Equal(t, -100.0, g.Value())
+ g.Sub(200)
+ assert.Equal(t, -300.0, g.Value())
+}
+
+func TestGauge_Inc(t *testing.T) {
+ var g Gauge
+ assert.Equal(t, 0.0, g.Value())
+ g.Inc()
+ assert.Equal(t, 1.0, g.Value())
+}
+
+func TestGauge_Dec(t *testing.T) {
+ var g Gauge
+ assert.Equal(t, 0.0, g.Value())
+ g.Dec()
+ assert.Equal(t, -1.0, g.Value())
+}
+
+func TestGauge_SetToCurrentTime(t *testing.T) {
+ var g Gauge
+ g.SetToCurrentTime()
+ assert.InDelta(t, time.Now().Unix(), g.Value(), 1)
+}
+
+func TestGauge_WriteTo(t *testing.T) {
+ g := Gauge(3.14)
+ m := map[string]int64{}
+ g.WriteTo(m, "pi", 100, 1)
+ assert.Len(t, m, 1)
+ assert.EqualValues(t, 314, m["pi"])
+}
+
+func TestGaugeVec_WriteTo(t *testing.T) {
+ g := NewGaugeVec()
+ g.Get("foo").Inc()
+ g.Get("foo").Inc()
+ g.Get("bar").Inc()
+ g.Get("bar").Add(0.14)
+
+ m := map[string]int64{}
+ g.WriteTo(m, "pi", 100, 1)
+ assert.Len(t, m, 2)
+ assert.EqualValues(t, 200, m["pi_foo"])
+ assert.EqualValues(t, 114, m["pi_bar"])
+}
+
+func BenchmarkGauge_Add(b *testing.B) {
+ benchmarks := []struct {
+ name string
+ value float64
+ }{
+ {"int", 1},
+ {"float", 3.14},
+ }
+ for _, bm := range benchmarks {
+ b.Run(bm.name, func(b *testing.B) {
+ var c Gauge
+ for i := 0; i < b.N; i++ {
+ c.Add(bm.value)
+ }
+ })
+ }
+}
+
+func BenchmarkGauge_Inc(b *testing.B) {
+ var c Gauge
+ for i := 0; i < b.N; i++ {
+ c.Inc()
+ }
+}
+
+func BenchmarkGauge_Set(b *testing.B) {
+ var c Gauge
+ for i := 0; i < b.N; i++ {
+ c.Set(3.14)
+ }
+}
+
+func BenchmarkGauge_Value(b *testing.B) {
+ var c Gauge
+ c.Inc()
+ c.Add(3.14)
+ for i := 0; i < b.N; i++ {
+ c.Value()
+ }
+}
+
+func BenchmarkGauge_WriteTo(b *testing.B) {
+ var c Gauge
+ c.Inc()
+ c.Add(3.14)
+ m := map[string]int64{}
+ for i := 0; i < b.N; i++ {
+ c.WriteTo(m, "pi", 100, 1)
+ }
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/metrics/histogram.go b/src/go/collectors/go.d.plugin/pkg/metrics/histogram.go
new file mode 100644
index 000000000..caabf09af
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/metrics/histogram.go
@@ -0,0 +1,171 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package metrics
+
+import (
+ "fmt"
+ "sort"
+
+ "github.com/netdata/netdata/go/go.d.plugin/pkg/stm"
+)
+
+type (
+ // A Histogram counts individual observations from an event or sample stream in
+ // configurable buckets. Similar to a summary, it also provides a sum of
+ // observations and an observation count.
+ //
+ // Note that Histograms, in contrast to Summaries, can be aggregated.
+ // However, Histograms require the user to pre-define suitable
+ // buckets, and they are in general less accurate. The Observe method of a
+ // histogram has a very low performance overhead in comparison with the Observe
+ // method of a summary.
+ //
+ // To create histogram instances, use NewHistogram.
+ Histogram interface {
+ Observer
+ }
+
+ histogram struct {
+ buckets []int64
+ upperBounds []float64
+ sum float64
+ count int64
+ rangeBuckets bool
+ }
+)
+
+var (
+ _ stm.Value = histogram{}
+)
+
+// DefBuckets are the default histogram buckets. The default buckets are
+// tailored to broadly measure the response time (in seconds) of a network
+// service. Most likely, however, you will be required to define buckets
+// customized to your use case.
+var DefBuckets = []float64{.005, .01, .025, .05, .1, .25, .5, 1, 2.5, 5, 10}
+
+// LinearBuckets creates 'count' buckets, each 'width' wide, where the lowest
+// bucket has an upper bound of 'start'. The final +Inf bucket is not counted
+// and not included in the returned slice. The returned slice is meant to be
+// used for the Buckets field of HistogramOpts.
+//
+// The function panics if 'count' is zero or negative.
+func LinearBuckets(start, width float64, count int) []float64 {
+ if count < 1 {
+ panic("LinearBuckets needs a positive count")
+ }
+ buckets := make([]float64, count)
+ for i := range buckets {
+ buckets[i] = start
+ start += width
+ }
+ return buckets
+}
+
+// ExponentialBuckets creates 'count' buckets, where the lowest bucket has an
+// upper bound of 'start' and each following bucket's upper bound is 'factor'
+// times the previous bucket's upper bound. The final +Inf bucket is not counted
+// and not included in the returned slice. The returned slice is meant to be
+// used for the Buckets field of HistogramOpts.
+//
+// The function panics if 'count' is 0 or negative, if 'start' is 0 or negative,
+// or if 'factor' is less than or equal 1.
+func ExponentialBuckets(start, factor float64, count int) []float64 {
+ if count < 1 {
+ panic("ExponentialBuckets needs a positive count")
+ }
+ if start <= 0 {
+ panic("ExponentialBuckets needs a positive start value")
+ }
+ if factor <= 1 {
+ panic("ExponentialBuckets needs a factor greater than 1")
+ }
+ buckets := make([]float64, count)
+ for i := range buckets {
+ buckets[i] = start
+ start *= factor
+ }
+ return buckets
+}
+
+// NewHistogram creates a new Histogram.
+func NewHistogram(buckets []float64) Histogram {
+ if len(buckets) == 0 {
+ buckets = DefBuckets
+ } else {
+ sort.Slice(buckets, func(i, j int) bool { return buckets[i] < buckets[j] })
+ }
+
+ return &histogram{
+ buckets: make([]int64, len(buckets)),
+ upperBounds: buckets,
+ count: 0,
+ sum: 0,
+ }
+}
+
+func NewHistogramWithRangeBuckets(buckets []float64) Histogram {
+ if len(buckets) == 0 {
+ buckets = DefBuckets
+ } else {
+ sort.Slice(buckets, func(i, j int) bool { return buckets[i] < buckets[j] })
+ }
+
+ return &histogram{
+ buckets: make([]int64, len(buckets)),
+ upperBounds: buckets,
+ count: 0,
+ sum: 0,
+ rangeBuckets: true,
+ }
+}
+
+// WriteTo writes its values into given map.
+// It adds those key-value pairs:
+//
+// ${key}_sum gauge, for sum of it's observed values
+// ${key}_count counter, for count of it's observed values (equals to +Inf bucket)
+// ${key}_bucket_1 counter, for 1st bucket count
+// ${key}_bucket_2 counter, for 2nd bucket count
+// ...
+// ${key}_bucket_N counter, for Nth bucket count
+func (h histogram) WriteTo(rv map[string]int64, key string, mul, div int) {
+ rv[key+"_sum"] = int64(h.sum * float64(mul) / float64(div))
+ rv[key+"_count"] = h.count
+ var conn int64
+ for i, bucket := range h.buckets {
+ name := fmt.Sprintf("%s_bucket_%d", key, i+1)
+ conn += bucket
+ if h.rangeBuckets {
+ rv[name] = bucket
+ } else {
+ rv[name] = conn
+ }
+ }
+ if h.rangeBuckets {
+ name := fmt.Sprintf("%s_bucket_inf", key)
+ rv[name] = h.count - conn
+ }
+}
+
+// Observe observes a value
+func (h *histogram) Observe(v float64) {
+ hotIdx := h.searchBucketIndex(v)
+ if hotIdx < len(h.buckets) {
+ h.buckets[hotIdx]++
+ }
+ h.sum += v
+ h.count++
+}
+
+func (h *histogram) searchBucketIndex(v float64) int {
+ if len(h.upperBounds) < 30 {
+ for i, upper := range h.upperBounds {
+ if upper >= v {
+ return i
+ }
+ }
+ return len(h.upperBounds)
+ }
+ return sort.SearchFloat64s(h.upperBounds, v)
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/metrics/histogram_test.go b/src/go/collectors/go.d.plugin/pkg/metrics/histogram_test.go
new file mode 100644
index 000000000..91266915c
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/metrics/histogram_test.go
@@ -0,0 +1,136 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package metrics
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestLinearBuckets(t *testing.T) {
+ buckets := LinearBuckets(0, 1, 10)
+ assert.Len(t, buckets, 10)
+ assert.EqualValues(t, 0, buckets[0])
+ assert.EqualValues(t, 5.0, buckets[5])
+ assert.EqualValues(t, 9.0, buckets[9])
+
+ assert.Panics(t, func() {
+ LinearBuckets(0, 1, 0)
+ })
+}
+
+func TestExponentialBuckets(t *testing.T) {
+ buckets := ExponentialBuckets(1, 2, 10)
+ assert.Len(t, buckets, 10)
+ assert.EqualValues(t, 1, buckets[0])
+ assert.EqualValues(t, 32.0, buckets[5])
+ assert.EqualValues(t, 512.0, buckets[9])
+
+ assert.Panics(t, func() {
+ ExponentialBuckets(1, 2, 0)
+ })
+ assert.Panics(t, func() {
+ ExponentialBuckets(0, 2, 2)
+ })
+
+ assert.Panics(t, func() {
+ ExponentialBuckets(1, 1, 2)
+ })
+}
+
+func TestNewHistogram(t *testing.T) {
+ h := NewHistogram(nil).(*histogram)
+ assert.EqualValues(t, 0, h.count)
+ assert.EqualValues(t, 0.0, h.sum)
+ assert.Equal(t, DefBuckets, h.upperBounds)
+
+ h = NewHistogram([]float64{1, 10, 5}).(*histogram)
+ assert.Equal(t, []float64{1, 5, 10}, h.upperBounds)
+ assert.Len(t, h.buckets, 3)
+}
+
+func TestHistogram_WriteTo(t *testing.T) {
+ h := NewHistogram([]float64{1, 2, 3})
+ m := map[string]int64{}
+ h.WriteTo(m, "pi", 100, 1)
+ assert.Len(t, m, 5)
+ assert.EqualValues(t, 0, m["pi_count"])
+ assert.EqualValues(t, 0, m["pi_sum"])
+ assert.EqualValues(t, 0, m["pi_bucket_1"])
+ assert.EqualValues(t, 0, m["pi_bucket_2"])
+ assert.EqualValues(t, 0, m["pi_bucket_3"])
+
+ h.Observe(0)
+ h.Observe(1.5)
+ h.Observe(3.5)
+ h.WriteTo(m, "pi", 100, 1)
+ assert.Len(t, m, 5)
+ assert.EqualValues(t, 3, m["pi_count"])
+ assert.EqualValues(t, 500, m["pi_sum"])
+ assert.EqualValues(t, 1, m["pi_bucket_1"])
+ assert.EqualValues(t, 2, m["pi_bucket_2"])
+ assert.EqualValues(t, 2, m["pi_bucket_3"])
+}
+
+func TestHistogram_searchBucketIndex(t *testing.T) {
+ h := NewHistogram(LinearBuckets(1, 1, 5)).(*histogram) // [1, 2, ..., 5]
+ assert.Equal(t, 0, h.searchBucketIndex(0.1))
+ assert.Equal(t, 1, h.searchBucketIndex(1.1))
+ assert.Equal(t, 5, h.searchBucketIndex(8.1))
+
+ h = NewHistogram(LinearBuckets(1, 1, 40)).(*histogram) // [1, 2, ..., 40]
+ assert.Equal(t, 0, h.searchBucketIndex(0.1))
+ assert.Equal(t, 1, h.searchBucketIndex(1.1))
+ assert.Equal(t, 5, h.searchBucketIndex(5.1))
+ assert.Equal(t, 7, h.searchBucketIndex(8))
+ assert.Equal(t, 39, h.searchBucketIndex(39.5))
+ assert.Equal(t, 40, h.searchBucketIndex(40.5))
+}
+
+func BenchmarkHistogram_Observe(b *testing.B) {
+ benchmarks := []struct {
+ name string
+ buckets []float64
+ }{
+ {"default", nil},
+ {"len_10", LinearBuckets(0, 0.1, 10)},
+ {"len_20", LinearBuckets(0, 0.1, 20)},
+ {"len_30", LinearBuckets(0, 0.1, 30)},
+ {"len_40", LinearBuckets(0, 0.1, 40)},
+ }
+ for _, bm := range benchmarks {
+ b.Run(bm.name, func(b *testing.B) {
+ h := NewHistogram(bm.buckets)
+ for i := 0; i < b.N; i++ {
+ h.Observe(2.5)
+ }
+ })
+ }
+}
+
+func BenchmarkHistogram_WriteTo(b *testing.B) {
+ benchmarks := []struct {
+ name string
+ buckets []float64
+ }{
+ {"default", nil},
+ {"len_10", LinearBuckets(0, 0.1, 10)},
+ {"len_20", LinearBuckets(0, 0.1, 20)},
+ {"len_30", LinearBuckets(0, 0.1, 30)},
+ {"len_40", LinearBuckets(0, 0.1, 40)},
+ }
+ for _, bm := range benchmarks {
+ b.Run(bm.name, func(b *testing.B) {
+ h := NewHistogram(bm.buckets)
+ h.Observe(0.1)
+ h.Observe(0.01)
+ h.Observe(0.5)
+ h.Observe(10)
+ m := map[string]int64{}
+ for i := 0; i < b.N; i++ {
+ h.WriteTo(m, "pi", 100, 1)
+ }
+ })
+ }
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/metrics/metrics.go b/src/go/collectors/go.d.plugin/pkg/metrics/metrics.go
new file mode 100644
index 000000000..44a24056f
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/metrics/metrics.go
@@ -0,0 +1,12 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package metrics
+
+import "github.com/netdata/netdata/go/go.d.plugin/pkg/stm"
+
+// Observer is an interface that wraps the Observe method, which is used by
+// Histogram and Summary to add observations.
+type Observer interface {
+ stm.Value
+ Observe(v float64)
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/metrics/summary.go b/src/go/collectors/go.d.plugin/pkg/metrics/summary.go
new file mode 100644
index 000000000..01b85f65e
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/metrics/summary.go
@@ -0,0 +1,125 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package metrics
+
+import (
+ "math"
+
+ "github.com/netdata/netdata/go/go.d.plugin/pkg/stm"
+)
+
+type (
+ // A Summary captures individual observations from an event or sample stream and
+ // summarizes them in a manner similar to traditional summary statistics:
+ // sum of observations
+ // observation count
+ // observation average.
+ //
+ // To create summary instances, use NewSummary.
+ Summary interface {
+ Observer
+ Reset()
+ }
+
+ // SummaryVec is a Collector that bundles a set of Summary which have different values for their names.
+ // This is used if you want to count the same thing partitioned by various dimensions
+ // (e.g. number of HTTP response time, partitioned by response code and method).
+ //
+ // Create instances with NewSummaryVec.
+ SummaryVec map[string]Summary
+
+ summary struct {
+ min float64
+ max float64
+ sum float64
+ count int64
+ }
+)
+
+var (
+ _ stm.Value = summary{}
+ _ stm.Value = SummaryVec{}
+)
+
+// NewSummary creates a new Summary.
+func NewSummary() Summary {
+ return &summary{
+ min: math.MaxFloat64,
+ max: -math.MaxFloat64,
+ }
+}
+
+// WriteTo writes its values into given map.
+// It adds those key-value pairs:
+//
+// ${key}_sum gauge, for sum of it's observed values from last Reset calls
+// ${key}_count counter, for count of it's observed values from last Reset calls
+// ${key}_min gauge, for min of it's observed values from last Reset calls (only exists if count > 0)
+// ${key}_max gauge, for max of it's observed values from last Reset calls (only exists if count > 0)
+// ${key}_avg gauge, for avg of it's observed values from last Reset calls (only exists if count > 0)
+func (s summary) WriteTo(rv map[string]int64, key string, mul, div int) {
+ if s.count > 0 {
+ rv[key+"_min"] = int64(s.min * float64(mul) / float64(div))
+ rv[key+"_max"] = int64(s.max * float64(mul) / float64(div))
+ rv[key+"_sum"] = int64(s.sum * float64(mul) / float64(div))
+ rv[key+"_count"] = s.count
+ rv[key+"_avg"] = int64(s.sum / float64(s.count) * float64(mul) / float64(div))
+ } else {
+ rv[key+"_count"] = 0
+ rv[key+"_sum"] = 0
+ delete(rv, key+"_min")
+ delete(rv, key+"_max")
+ delete(rv, key+"_avg")
+ }
+}
+
+// Reset resets all of its counters.
+// Call it before every scrape loop.
+func (s *summary) Reset() {
+ s.min = math.MaxFloat64
+ s.max = -math.MaxFloat64
+ s.sum = 0
+ s.count = 0
+}
+
+// Observe observes a value
+func (s *summary) Observe(v float64) {
+ if v > s.max {
+ s.max = v
+ }
+ if v < s.min {
+ s.min = v
+ }
+ s.sum += v
+ s.count++
+}
+
+// NewSummaryVec creates a new SummaryVec instance.
+func NewSummaryVec() SummaryVec {
+ return SummaryVec{}
+}
+
+// WriteTo writes its value into given map.
+func (c SummaryVec) WriteTo(rv map[string]int64, key string, mul, div int) {
+ for name, value := range c {
+ value.WriteTo(rv, key+"_"+name, mul, div)
+ }
+}
+
+// Get gets summary instance by name.
+func (c SummaryVec) Get(name string) Summary {
+ item, ok := c[name]
+ if ok {
+ return item
+ }
+ item = NewSummary()
+ c[name] = item
+ return item
+}
+
+// Reset resets its all summaries.
+func (c SummaryVec) Reset() {
+ for _, value := range c {
+ value.Reset()
+ }
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/metrics/summary_test.go b/src/go/collectors/go.d.plugin/pkg/metrics/summary_test.go
new file mode 100644
index 000000000..b98218369
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/metrics/summary_test.go
@@ -0,0 +1,78 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package metrics
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestNewSummary(t *testing.T) {
+ s := NewSummary().(*summary)
+ assert.EqualValues(t, 0, s.count)
+ assert.Equal(t, 0.0, s.sum)
+ s.Observe(3.14)
+ assert.Equal(t, 3.14, s.min)
+ assert.Equal(t, 3.14, s.max)
+}
+
+func TestSummary_WriteTo(t *testing.T) {
+ s := NewSummary()
+
+ m1 := map[string]int64{}
+ s.WriteTo(m1, "pi", 100, 1)
+ assert.Len(t, m1, 2)
+ assert.Contains(t, m1, "pi_count")
+ assert.Contains(t, m1, "pi_sum")
+ assert.EqualValues(t, 0, m1["pi_count"])
+ assert.EqualValues(t, 0, m1["pi_sum"])
+
+ s.Observe(3.14)
+ s.Observe(2.71)
+ s.Observe(-10)
+
+ m2 := map[string]int64{}
+ s.WriteTo(m1, "pi", 100, 1)
+ s.WriteTo(m2, "pi", 100, 1)
+ assert.Equal(t, m1, m2)
+ assert.Len(t, m1, 5)
+ assert.EqualValues(t, 3, m1["pi_count"])
+ assert.EqualValues(t, -415, m1["pi_sum"])
+ assert.EqualValues(t, -1000, m1["pi_min"])
+ assert.EqualValues(t, 314, m1["pi_max"])
+ assert.EqualValues(t, -138, m1["pi_avg"])
+
+ s.Reset()
+ s.WriteTo(m1, "pi", 100, 1)
+ assert.Len(t, m1, 2)
+ assert.Contains(t, m1, "pi_count")
+ assert.Contains(t, m1, "pi_sum")
+ assert.EqualValues(t, 0, m1["pi_count"])
+ assert.EqualValues(t, 0, m1["pi_sum"])
+}
+
+func TestSummary_Reset(t *testing.T) {
+ s := NewSummary().(*summary)
+ s.Observe(1)
+ s.Reset()
+ assert.EqualValues(t, 0, s.count)
+}
+
+func BenchmarkSummary_Observe(b *testing.B) {
+ s := NewSummary()
+ for i := 0; i < b.N; i++ {
+ s.Observe(2.5)
+ }
+}
+
+func BenchmarkSummary_WriteTo(b *testing.B) {
+ s := NewSummary()
+ s.Observe(2.5)
+ s.Observe(3.5)
+ s.Observe(4.5)
+ m := map[string]int64{}
+ for i := 0; i < b.N; i++ {
+ s.WriteTo(m, "pi", 100, 1)
+ }
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/metrics/unique_counter.go b/src/go/collectors/go.d.plugin/pkg/metrics/unique_counter.go
new file mode 100644
index 000000000..dfc96126a
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/metrics/unique_counter.go
@@ -0,0 +1,109 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package metrics
+
+import (
+ "github.com/axiomhq/hyperloglog"
+ "github.com/netdata/netdata/go/go.d.plugin/pkg/stm"
+)
+
+type (
+ UniqueCounter interface {
+ stm.Value
+ Insert(s string)
+ Value() int
+ Reset()
+ }
+
+ mapUniqueCounter struct {
+ m map[string]bool
+ }
+
+ hyperLogLogUniqueCounter struct {
+ sketch *hyperloglog.Sketch
+ }
+
+ UniqueCounterVec struct {
+ useHyperLogLog bool
+ Items map[string]UniqueCounter
+ }
+)
+
+var (
+ _ stm.Value = mapUniqueCounter{}
+ _ stm.Value = hyperLogLogUniqueCounter{}
+ _ stm.Value = UniqueCounterVec{}
+)
+
+func NewUniqueCounter(useHyperLogLog bool) UniqueCounter {
+ if useHyperLogLog {
+ return &hyperLogLogUniqueCounter{hyperloglog.New()}
+ }
+ return mapUniqueCounter{map[string]bool{}}
+}
+
+func (c mapUniqueCounter) WriteTo(rv map[string]int64, key string, mul, div int) {
+ rv[key] = int64(float64(c.Value()*mul) / float64(div))
+}
+
+func (c mapUniqueCounter) Insert(s string) {
+ c.m[s] = true
+}
+
+func (c mapUniqueCounter) Value() int {
+ return len(c.m)
+}
+
+func (c mapUniqueCounter) Reset() {
+ for key := range c.m {
+ delete(c.m, key)
+ }
+}
+
+// WriteTo writes its value into given map.
+func (c hyperLogLogUniqueCounter) WriteTo(rv map[string]int64, key string, mul, div int) {
+ rv[key] = int64(float64(c.Value()*mul) / float64(div))
+}
+
+func (c *hyperLogLogUniqueCounter) Insert(s string) {
+ c.sketch.Insert([]byte(s))
+}
+
+func (c *hyperLogLogUniqueCounter) Value() int {
+ return int(c.sketch.Estimate())
+}
+
+func (c *hyperLogLogUniqueCounter) Reset() {
+ c.sketch = hyperloglog.New()
+}
+
+func NewUniqueCounterVec(useHyperLogLog bool) UniqueCounterVec {
+ return UniqueCounterVec{
+ Items: map[string]UniqueCounter{},
+ useHyperLogLog: useHyperLogLog,
+ }
+}
+
+// WriteTo writes its value into given map.
+func (c UniqueCounterVec) WriteTo(rv map[string]int64, key string, mul, div int) {
+ for name, value := range c.Items {
+ value.WriteTo(rv, key+"_"+name, mul, div)
+ }
+}
+
+// Get gets UniqueCounter instance by name
+func (c UniqueCounterVec) Get(name string) UniqueCounter {
+ item, ok := c.Items[name]
+ if ok {
+ return item
+ }
+ item = NewUniqueCounter(c.useHyperLogLog)
+ c.Items[name] = item
+ return item
+}
+
+func (c UniqueCounterVec) Reset() {
+ for _, value := range c.Items {
+ value.Reset()
+ }
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/metrics/unique_counter_test.go b/src/go/collectors/go.d.plugin/pkg/metrics/unique_counter_test.go
new file mode 100644
index 000000000..b9439c9a3
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/metrics/unique_counter_test.go
@@ -0,0 +1,145 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package metrics
+
+import (
+ "fmt"
+ "strconv"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestHyperLogLogUniqueCounter_Value(t *testing.T) {
+ for _, useHLL := range []bool{true, false} {
+ t.Run(fmt.Sprintf("HLL=%v", useHLL), func(t *testing.T) {
+ c := NewUniqueCounter(useHLL)
+ assert.Equal(t, 0, c.Value())
+
+ c.Insert("foo")
+ assert.Equal(t, 1, c.Value())
+
+ c.Insert("foo")
+ assert.Equal(t, 1, c.Value())
+
+ c.Insert("bar")
+ assert.Equal(t, 2, c.Value())
+
+ c.Insert("baz")
+ assert.Equal(t, 3, c.Value())
+
+ c.Reset()
+ assert.Equal(t, 0, c.Value())
+
+ c.Insert("foo")
+ assert.Equal(t, 1, c.Value())
+ })
+ }
+}
+
+func TestHyperLogLogUniqueCounter_WriteTo(t *testing.T) {
+ for _, useHLL := range []bool{true, false} {
+ t.Run(fmt.Sprintf("HLL=%v", useHLL), func(t *testing.T) {
+ c := NewUniqueCounterVec(useHLL)
+ c.Get("a").Insert("foo")
+ c.Get("a").Insert("bar")
+ c.Get("b").Insert("foo")
+
+ m := map[string]int64{}
+ c.WriteTo(m, "pi", 100, 1)
+ assert.Len(t, m, 2)
+ assert.EqualValues(t, 200, m["pi_a"])
+ assert.EqualValues(t, 100, m["pi_b"])
+ })
+ }
+}
+
+func TestUniqueCounterVec_Reset(t *testing.T) {
+ for _, useHLL := range []bool{true, false} {
+ t.Run(fmt.Sprintf("HLL=%v", useHLL), func(t *testing.T) {
+ c := NewUniqueCounterVec(useHLL)
+ c.Get("a").Insert("foo")
+ c.Get("a").Insert("bar")
+ c.Get("b").Insert("foo")
+
+ assert.Equal(t, 2, len(c.Items))
+ assert.Equal(t, 2, c.Get("a").Value())
+ assert.Equal(t, 1, c.Get("b").Value())
+
+ c.Reset()
+ assert.Equal(t, 2, len(c.Items))
+ assert.Equal(t, 0, c.Get("a").Value())
+ assert.Equal(t, 0, c.Get("b").Value())
+ })
+ }
+}
+
+func BenchmarkUniqueCounter_Insert(b *testing.B) {
+ benchmarks := []struct {
+ name string
+ same bool
+ hyperloglog bool
+ nop bool
+ }{
+
+ {"map-same", true, false, false},
+ {"hll-same", true, true, false},
+
+ {"nop", false, false, true},
+ {"map-diff", false, false, false},
+ {"hll-diff", false, true, false},
+ }
+ for _, bm := range benchmarks {
+ b.Run(bm.name, func(b *testing.B) {
+ c := NewUniqueCounter(bm.hyperloglog)
+ if bm.same {
+ for i := 0; i < b.N; i++ {
+ c.Insert("foo")
+ }
+ } else if bm.nop {
+ for i := 0; i < b.N; i++ {
+ strconv.Itoa(i)
+ }
+ } else {
+ for i := 0; i < b.N; i++ {
+ c.Insert(strconv.Itoa(i))
+ }
+ }
+ })
+ }
+}
+
+func BenchmarkUniqueCounterVec_Insert(b *testing.B) {
+ benchmarks := []struct {
+ name string
+ same bool
+ hyperloglog bool
+ nop bool
+ }{
+
+ {"map-same", true, false, false},
+ {"hll-same", true, true, false},
+
+ {"nop", false, false, true},
+ {"map-diff", false, false, false},
+ {"hll-diff", false, true, false},
+ }
+ for _, bm := range benchmarks {
+ b.Run(bm.name, func(b *testing.B) {
+ c := NewUniqueCounterVec(bm.hyperloglog)
+ if bm.same {
+ for i := 0; i < b.N; i++ {
+ c.Get("a").Insert("foo")
+ }
+ } else if bm.nop {
+ for i := 0; i < b.N; i++ {
+ strconv.Itoa(i)
+ }
+ } else {
+ for i := 0; i < b.N; i++ {
+ c.Get("a").Insert(strconv.Itoa(i))
+ }
+ }
+ })
+ }
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/multipath/multipath.go b/src/go/collectors/go.d.plugin/pkg/multipath/multipath.go
new file mode 100644
index 000000000..6172def06
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/multipath/multipath.go
@@ -0,0 +1,90 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package multipath
+
+import (
+ "errors"
+ "fmt"
+ "os"
+ "path/filepath"
+ "slices"
+ "strings"
+
+ "github.com/mitchellh/go-homedir"
+)
+
+// ErrNotFound is returned by Find when the requested file is not present
+// in any of the configured directories.
+type ErrNotFound struct{ msg string }
+
+// Error implements the error interface.
+func (e ErrNotFound) Error() string { return e.msg }
+
+// IsNotFound returns a boolean indicating whether the error is ErrNotFound or not.
+func IsNotFound(err error) bool {
+	var errNotFound ErrNotFound
+	return errors.As(err, &errNotFound)
+}
+
+// MultiPath multi-paths
+type MultiPath []string
+
+// New returns a MultiPath built from the given directories.
+// Empty entries are dropped, "~" is expanded to the user's home directory,
+// and duplicates are removed while preserving the original order.
+func New(paths ...string) MultiPath {
+	set := map[string]bool{}
+	mPath := make(MultiPath, 0)
+
+	for _, dir := range paths {
+		if dir == "" {
+			continue
+		}
+		// Use the expanded path only when expansion succeeded.
+		// BUG FIX: the condition was inverted (err != nil), so "~" was
+		// never expanded and a failed expansion replaced dir with "".
+		if d, err := homedir.Expand(dir); err == nil {
+			dir = d
+		}
+		if !set[dir] {
+			mPath = append(mPath, dir)
+			set[dir] = true
+		}
+	}
+
+	return mPath
+}
+
+// Find finds a file in given paths, searching the directories in order,
+// and returns its joined path. It returns ErrNotFound when the file is
+// absent from every directory.
+// NOTE(review): any Stat error other than "not exist" (e.g. permission
+// denied) is treated as "file present" and the path is returned — confirm
+// this is intended.
+func (p MultiPath) Find(filename string) (string, error) {
+	for _, dir := range p {
+		file := filepath.Join(dir, filename)
+		if _, err := os.Stat(file); !os.IsNotExist(err) {
+			return file, nil
+		}
+	}
+	return "", ErrNotFound{msg: fmt.Sprintf("can't find '%s' in %v", filename, p)}
+}
+
+// FindFiles walks the configured directories in order and returns regular
+// files whose extension matches one of the given suffixes (all regular
+// files when no suffixes are given). When the same base name (file name
+// without its extension) appears in several directories, only the first
+// occurrence is kept.
+func (p MultiPath) FindFiles(suffixes ...string) ([]string, error) {
+	seen := make(map[string]bool)
+	var found []string
+
+	for _, dir := range p {
+		entries, err := os.ReadDir(dir)
+		if err != nil {
+			// unreadable or missing directories are silently skipped
+			continue
+		}
+
+		for _, entry := range entries {
+			if !entry.Type().IsRegular() {
+				continue
+			}
+
+			ext := filepath.Ext(entry.Name())
+			if len(suffixes) != 0 && !slices.Contains(suffixes, ext) {
+				continue
+			}
+
+			base := strings.TrimSuffix(entry.Name(), ext)
+			if seen[base] {
+				continue
+			}
+			seen[base] = true
+
+			found = append(found, filepath.Join(dir, entry.Name()))
+		}
+	}
+
+	return found, nil
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/multipath/multipath_test.go b/src/go/collectors/go.d.plugin/pkg/multipath/multipath_test.go
new file mode 100644
index 000000000..cd6c90d95
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/multipath/multipath_test.go
@@ -0,0 +1,60 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package multipath
+
+import (
+ "errors"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+// TestNew checks that empty strings and duplicates are dropped.
+func TestNew(t *testing.T) {
+	assert.Len(
+		t,
+		New("path1", "path2", "path2", "", "path3"),
+		3,
+	)
+}
+
+// TestMultiPath_Find checks lookup order and the not-found error.
+// "path1" does not exist, so every hit must come from testdata/data1.
+func TestMultiPath_Find(t *testing.T) {
+	m := New("path1", "testdata/data1")
+
+	v, err := m.Find("not exist")
+	assert.Zero(t, v)
+	assert.Error(t, err)
+
+	v, err = m.Find("test-empty.conf")
+	assert.Equal(t, "testdata/data1/test-empty.conf", v)
+	assert.Nil(t, err)
+
+	v, err = m.Find("test.conf")
+	assert.Equal(t, "testdata/data1/test.conf", v)
+	assert.Nil(t, err)
+}
+
+// TestIsNotFound checks ErrNotFound detection against a plain error.
+func TestIsNotFound(t *testing.T) {
+	assert.True(t, IsNotFound(ErrNotFound{}))
+	assert.False(t, IsNotFound(errors.New("")))
+}
+
+// TestMultiPath_FindFiles checks suffix filtering and that, for a given
+// base name, the earliest directory in the MultiPath wins.
+func TestMultiPath_FindFiles(t *testing.T) {
+	m := New("path1", "testdata/data2", "testdata/data1")
+
+	files, err := m.FindFiles(".conf")
+	assert.NoError(t, err)
+	assert.Equal(t, []string{"testdata/data2/test-empty.conf", "testdata/data2/test.conf"}, files)
+
+	files, err = m.FindFiles()
+	assert.NoError(t, err)
+	assert.Equal(t, []string{"testdata/data2/test-empty.conf", "testdata/data2/test.conf"}, files)
+
+	files, err = m.FindFiles(".not_exist")
+	assert.NoError(t, err)
+	assert.Equal(t, []string(nil), files)
+
+	// reversed directory order flips which copies are returned
+	m = New("path1", "testdata/data1", "testdata/data2")
+	files, err = m.FindFiles(".conf")
+	assert.NoError(t, err)
+	assert.Equal(t, []string{"testdata/data1/test-empty.conf", "testdata/data1/test.conf"}, files)
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/multipath/testdata/data1/test-empty.conf b/src/go/collectors/go.d.plugin/pkg/multipath/testdata/data1/test-empty.conf
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/multipath/testdata/data1/test-empty.conf
diff --git a/src/go/collectors/go.d.plugin/pkg/multipath/testdata/data1/test.conf b/src/go/collectors/go.d.plugin/pkg/multipath/testdata/data1/test.conf
new file mode 100644
index 000000000..aebe64730
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/multipath/testdata/data1/test.conf
@@ -0,0 +1 @@
+not empty! \ No newline at end of file
diff --git a/src/go/collectors/go.d.plugin/pkg/multipath/testdata/data2/test-empty.conf b/src/go/collectors/go.d.plugin/pkg/multipath/testdata/data2/test-empty.conf
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/multipath/testdata/data2/test-empty.conf
diff --git a/src/go/collectors/go.d.plugin/pkg/multipath/testdata/data2/test.conf b/src/go/collectors/go.d.plugin/pkg/multipath/testdata/data2/test.conf
new file mode 100644
index 000000000..aebe64730
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/multipath/testdata/data2/test.conf
@@ -0,0 +1 @@
+not empty! \ No newline at end of file
diff --git a/src/go/collectors/go.d.plugin/pkg/prometheus/client.go b/src/go/collectors/go.d.plugin/pkg/prometheus/client.go
new file mode 100644
index 000000000..3365b270c
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/prometheus/client.go
@@ -0,0 +1,155 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package prometheus
+
+import (
+ "bufio"
+ "bytes"
+ "compress/gzip"
+ "fmt"
+ "io"
+ "net/http"
+ "net/url"
+ "os"
+ "path/filepath"
+
+ "github.com/netdata/netdata/go/go.d.plugin/pkg/prometheus/selector"
+ "github.com/netdata/netdata/go/go.d.plugin/pkg/web"
+)
+
+type (
+	// Prometheus is a helper for scrape and parse prometheus format metrics.
+	Prometheus interface {
+		// ScrapeSeries and parse prometheus format metrics
+		ScrapeSeries() (Series, error)
+		// Scrape scrapes and parses metrics into metric families.
+		Scrape() (MetricFamilies, error)
+		// HTTPClient returns the underlying HTTP client.
+		HTTPClient() *http.Client
+	}
+
+	// prometheus is the default Prometheus implementation.
+	prometheus struct {
+		client *http.Client
+		request web.Request
+		// filepath is non-empty when the request URL uses the "file"
+		// scheme; fetch then reads this file instead of doing HTTP.
+		filepath string
+
+		// sr is an optional series filter; nil means keep everything.
+		sr selector.Selector
+
+		parser promTextParser
+
+		// scratch buffers reused across scrapes to avoid re-allocation
+		buf *bytes.Buffer
+		gzipr *gzip.Reader
+		bodyBuf *bufio.Reader
+	}
+)
+
+const (
+	// acceptHeader prefers the prometheus text exposition format (0.0.4)
+	// but accepts anything as a fallback.
+	acceptHeader = `text/plain;version=0.0.4;q=1,*/*;q=0.1`
+)
+
+// New creates a Prometheus instance.
+// For consistency with NewWithSelector it also recognizes "file://" URLs
+// and reads metrics from the referenced file instead of doing an HTTP
+// request (previously only NewWithSelector handled the "file" scheme, and
+// file URLs passed to New failed at fetch time).
+func New(client *http.Client, request web.Request) Prometheus {
+	p := &prometheus{
+		client:  client,
+		request: request,
+		buf:     bytes.NewBuffer(make([]byte, 0, 16000)),
+	}
+
+	if v, err := url.Parse(request.URL); err == nil && v.Scheme == "file" {
+		p.filepath = filepath.Join(v.Host, v.Path)
+	}
+
+	return p
+}
+
+// NewWithSelector creates a Prometheus instance with the selector.
+// Series not matching the selector are dropped during parsing.
+func NewWithSelector(client *http.Client, request web.Request, sr selector.Selector) Prometheus {
+	p := &prometheus{
+		client:  client,
+		request: request,
+		sr:      sr,
+		buf:     bytes.NewBuffer(make([]byte, 0, 16000)),
+		parser:  promTextParser{sr: sr},
+	}
+
+	// A "file://host/path" URL means: read metrics from a local file
+	// instead of scraping over HTTP (see fetch).
+	if v, err := url.Parse(request.URL); err == nil && v.Scheme == "file" {
+		p.filepath = filepath.Join(v.Host, v.Path)
+	}
+
+	return p
+}
+
+// HTTPClient returns the HTTP client used for scraping.
+func (p *prometheus) HTTPClient() *http.Client {
+	return p.client
+}
+
+// ScrapeSeries scrapes metrics, parses and sorts
+func (p *prometheus) ScrapeSeries() (Series, error) {
+	// buf is reused across scrapes; reset before fetching
+	p.buf.Reset()
+
+	if err := p.fetch(p.buf); err != nil {
+		return nil, err
+	}
+
+	return p.parser.parseToSeries(p.buf.Bytes())
+}
+
+// Scrape scrapes metrics and parses them into metric families.
+func (p *prometheus) Scrape() (MetricFamilies, error) {
+	// buf is reused across scrapes; reset before fetching
+	p.buf.Reset()
+
+	if err := p.fetch(p.buf); err != nil {
+		return nil, err
+	}
+
+	return p.parser.parseToMetricFamilies(p.buf.Bytes())
+}
+
+// fetch writes the raw scrape payload into w.
+// When filepath is set (a "file://" request URL) it copies the file's
+// contents; otherwise it performs the HTTP request, transparently
+// decompressing gzip-encoded responses. The gzip reader and body buffer
+// are lazily created once and reused on subsequent scrapes.
+func (p *prometheus) fetch(w io.Writer) error {
+	// TODO: should be a separate text file prom client
+	if p.filepath != "" {
+		f, err := os.Open(p.filepath)
+		if err != nil {
+			return err
+		}
+		defer f.Close()
+
+		_, err = io.Copy(w, f)
+
+		return err
+	}
+
+	req, err := web.NewHTTPRequest(p.request)
+	if err != nil {
+		return err
+	}
+
+	req.Header.Add("Accept", acceptHeader)
+	req.Header.Add("Accept-Encoding", "gzip")
+
+	resp, err := p.client.Do(req)
+	if err != nil {
+		return err
+	}
+
+	// Drain and close the body so the transport can reuse the connection.
+	defer func() {
+		_, _ = io.Copy(io.Discard, resp.Body)
+		_ = resp.Body.Close()
+	}()
+
+	if resp.StatusCode != http.StatusOK {
+		return fmt.Errorf("server '%s' returned HTTP status code %d (%s)", req.URL, resp.StatusCode, resp.Status)
+	}
+
+	// Uncompressed response: stream it straight through.
+	if resp.Header.Get("Content-Encoding") != "gzip" {
+		_, err = io.Copy(w, resp.Body)
+		return err
+	}
+
+	// Gzip response: create the reader on first use, Reset it afterwards
+	// to avoid re-allocating on every scrape.
+	if p.gzipr == nil {
+		p.bodyBuf = bufio.NewReader(resp.Body)
+		p.gzipr, err = gzip.NewReader(p.bodyBuf)
+		if err != nil {
+			return err
+		}
+	} else {
+		p.bodyBuf.Reset(resp.Body)
+		_ = p.gzipr.Reset(p.bodyBuf)
+	}
+
+	_, err = io.Copy(w, p.gzipr)
+	_ = p.gzipr.Close()
+
+	return err
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/prometheus/client_test.go b/src/go/collectors/go.d.plugin/pkg/prometheus/client_test.go
new file mode 100644
index 000000000..76199800a
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/prometheus/client_test.go
@@ -0,0 +1,137 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package prometheus
+
+import (
+ "bytes"
+ "compress/gzip"
+ "net/http"
+ "net/http/httptest"
+ "os"
+ "strings"
+ "testing"
+
+ "github.com/netdata/netdata/go/go.d.plugin/pkg/prometheus/selector"
+ "github.com/netdata/netdata/go/go.d.plugin/pkg/web"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+var (
+ testData, _ = os.ReadFile("testdata/testdata.txt")
+ testDataNoMeta, _ = os.ReadFile("testdata/testdata.nometa.txt")
+)
+
+// Test_testClientDataIsValid verifies that the testdata fixtures were read
+// successfully; the scrape tests below depend on them being non-nil.
+func Test_testClientDataIsValid(t *testing.T) {
+	for name, data := range map[string][]byte{
+		"testData":       testData,
+		"testDataNoMeta": testDataNoMeta, // was missing, although TestPrometheusGzip uses it
+	} {
+		require.NotNilf(t, data, name)
+	}
+}
+
+// TestPrometheus404 checks that a non-200 response yields an error and no series.
+func TestPrometheus404(t *testing.T) {
+	tsMux := http.NewServeMux()
+	tsMux.HandleFunc("/metrics", func(w http.ResponseWriter, r *http.Request) {
+		w.WriteHeader(404)
+	})
+	ts := httptest.NewServer(tsMux)
+	defer ts.Close()
+
+	req := web.Request{URL: ts.URL + "/metrics"}
+	prom := New(http.DefaultClient, req)
+	res, err := prom.ScrapeSeries()
+
+	assert.Error(t, err)
+	assert.Nil(t, res)
+}
+
+// TestPrometheusPlain scrapes an uncompressed text payload.
+func TestPrometheusPlain(t *testing.T) {
+	tsMux := http.NewServeMux()
+	tsMux.HandleFunc("/metrics", func(w http.ResponseWriter, r *http.Request) {
+		_, _ = w.Write(testData)
+	})
+	ts := httptest.NewServer(tsMux)
+	defer ts.Close()
+
+	req := web.Request{URL: ts.URL + "/metrics"}
+	prom := New(http.DefaultClient, req)
+	res, err := prom.ScrapeSeries()
+
+	assert.NoError(t, err)
+	verifyTestData(t, res)
+}
+
+// TestPrometheusPlainWithSelector checks that only series matching the
+// selector ("go_gc*") survive the scrape.
+func TestPrometheusPlainWithSelector(t *testing.T) {
+	tsMux := http.NewServeMux()
+	tsMux.HandleFunc("/metrics", func(w http.ResponseWriter, r *http.Request) {
+		_, _ = w.Write(testData)
+	})
+	ts := httptest.NewServer(tsMux)
+	defer ts.Close()
+
+	req := web.Request{URL: ts.URL + "/metrics"}
+	sr, err := selector.Parse("go_gc*")
+	require.NoError(t, err)
+	prom := NewWithSelector(http.DefaultClient, req, sr)
+
+	res, err := prom.ScrapeSeries()
+	require.NoError(t, err)
+
+	for _, v := range res {
+		assert.Truef(t, strings.HasPrefix(v.Name(), "go_gc"), v.Name())
+	}
+}
+
+// TestPrometheusGzip scrapes twice with gzip-encoded responses, which also
+// exercises the gzip.Reader Reset/reuse path in fetch.
+func TestPrometheusGzip(t *testing.T) {
+	counter := 0
+	rawTestData := [][]byte{testData, testDataNoMeta}
+	tsMux := http.NewServeMux()
+	tsMux.HandleFunc("/metrics", func(w http.ResponseWriter, r *http.Request) {
+		w.Header().Set("Content-Encoding", "gzip")
+		w.WriteHeader(200)
+		gz := new(bytes.Buffer)
+		ww := gzip.NewWriter(gz)
+		_, _ = ww.Write(rawTestData[counter])
+		_ = ww.Close()
+		_, _ = gz.WriteTo(w)
+		counter++
+	})
+	ts := httptest.NewServer(tsMux)
+	defer ts.Close()
+
+	req := web.Request{URL: ts.URL + "/metrics"}
+	prom := New(http.DefaultClient, req)
+
+	for i := 0; i < 2; i++ {
+		res, err := prom.ScrapeSeries()
+		assert.NoError(t, err)
+		verifyTestData(t, res)
+	}
+}
+
+// TestPrometheusReadFromFile scrapes a "file://" URL twice to cover the
+// local-file branch of fetch.
+func TestPrometheusReadFromFile(t *testing.T) {
+	req := web.Request{URL: "file://testdata/testdata.txt"}
+	prom := NewWithSelector(http.DefaultClient, req, nil)
+
+	for i := 0; i < 2; i++ {
+		res, err := prom.ScrapeSeries()
+		assert.NoError(t, err)
+		verifyTestData(t, res)
+	}
+}
+
+// verifyTestData spot-checks the parsed series from testdata/testdata.txt.
+func verifyTestData(t *testing.T, ms Series) {
+	assert.Equal(t, 410, len(ms))
+	assert.Equal(t, "go_gc_duration_seconds", ms[0].Labels.Get("__name__"))
+	assert.Equal(t, "0.25", ms[0].Labels.Get("quantile"))
+	assert.InDelta(t, 4.9351e-05, ms[0].Value, 0.0001)
+
+	notExistYet := ms.FindByName("not_exist_yet")
+	assert.NotNil(t, notExistYet)
+	assert.Len(t, notExistYet, 0)
+
+	targetInterval := ms.FindByName("prometheus_target_interval_length_seconds")
+	assert.Len(t, targetInterval, 5)
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/prometheus/metric_family.go b/src/go/collectors/go.d.plugin/pkg/prometheus/metric_family.go
new file mode 100644
index 000000000..dde08801e
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/prometheus/metric_family.go
@@ -0,0 +1,116 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package prometheus
+
+import (
+ "github.com/prometheus/common/model"
+ "github.com/prometheus/prometheus/model/labels"
+)
+
+type (
+	// MetricFamilies maps a metric family name to its MetricFamily.
+	MetricFamilies map[string]*MetricFamily
+
+	// MetricFamily is a named, typed group of metrics.
+	MetricFamily struct {
+		name string
+		help string
+		typ model.MetricType
+		metrics []Metric
+	}
+	// Metric is a single sample with its labels; only the field matching
+	// the family's type is expected to be set (the rest stay nil).
+	Metric struct {
+		labels []labels.Label
+		gauge *Gauge
+		counter *Counter
+		summary *Summary
+		histogram *Histogram
+		untyped *Untyped
+	}
+	// Gauge holds a gauge sample value.
+	Gauge struct {
+		value float64
+	}
+	// Counter holds a counter sample value.
+	Counter struct {
+		value float64
+	}
+	// Summary holds the sum, count and quantiles of a summary metric.
+	Summary struct {
+		sum float64
+		count float64
+		quantiles []Quantile
+	}
+	// Quantile is a single (quantile, value) pair of a summary.
+	Quantile struct {
+		quantile float64
+		value float64
+	}
+	// Histogram holds the sum, count and buckets of a histogram metric.
+	Histogram struct {
+		sum float64
+		count float64
+		buckets []Bucket
+	}
+	// Bucket is a single (upper bound, cumulative count) pair of a histogram.
+	Bucket struct {
+		upperBound float64
+		cumulativeCount float64
+	}
+	// Untyped holds an untyped sample value.
+	Untyped struct {
+		value float64
+	}
+)
+
+// Len reports how many metric families the collection holds.
+func (mfs MetricFamilies) Len() int {
+	return len(mfs)
+}
+
+// Get returns the metric family with the given name, or nil when absent.
+func (mfs MetricFamilies) Get(name string) *MetricFamily {
+	return mfs[name]
+}
+
+// GetGauge returns the named metric family only when it is a gauge.
+func (mfs MetricFamilies) GetGauge(name string) *MetricFamily {
+	return mfs.get(name, model.MetricTypeGauge)
+}
+
+// GetCounter returns the named metric family only when it is a counter.
+func (mfs MetricFamilies) GetCounter(name string) *MetricFamily {
+	return mfs.get(name, model.MetricTypeCounter)
+}
+
+// GetSummary returns the named metric family only when it is a summary.
+func (mfs MetricFamilies) GetSummary(name string) *MetricFamily {
+	return mfs.get(name, model.MetricTypeSummary)
+}
+
+// GetHistogram returns the named metric family only when it is a histogram.
+func (mfs MetricFamilies) GetHistogram(name string) *MetricFamily {
+	return mfs.get(name, model.MetricTypeHistogram)
+}
+
+// get returns the named family when its type matches typ, otherwise nil.
+func (mfs MetricFamilies) get(name string, typ model.MetricType) *MetricFamily {
+	if mf := mfs.Get(name); mf != nil && mf.typ == typ {
+		return mf
+	}
+	return nil
+}
+
+// MetricFamily read-only accessors.
+
+func (mf *MetricFamily) Name() string { return mf.name }
+func (mf *MetricFamily) Help() string { return mf.help }
+func (mf *MetricFamily) Type() model.MetricType { return mf.typ }
+func (mf *MetricFamily) Metrics() []Metric { return mf.metrics }
+
+// Metric accessors. The typed getters return nil when the metric does not
+// carry a value of that type.
+
+func (m *Metric) Labels() labels.Labels { return m.labels }
+func (m *Metric) Gauge() *Gauge { return m.gauge }
+func (m *Metric) Counter() *Counter { return m.counter }
+func (m *Metric) Summary() *Summary { return m.summary }
+func (m *Metric) Histogram() *Histogram { return m.histogram }
+func (m *Metric) Untyped() *Untyped { return m.untyped }
+
+// Value getters for the concrete sample types.
+
+func (g Gauge) Value() float64 { return g.value }
+func (c Counter) Value() float64 { return c.value }
+func (u Untyped) Value() float64 { return u.value }
+
+func (s Summary) Count() float64 { return s.count }
+func (s Summary) Sum() float64 { return s.sum }
+func (s Summary) Quantiles() []Quantile { return s.quantiles }
+
+func (q Quantile) Quantile() float64 { return q.quantile }
+func (q Quantile) Value() float64 { return q.value }
+
+func (h Histogram) Count() float64 { return h.count }
+func (h Histogram) Sum() float64 { return h.sum }
+func (h Histogram) Buckets() []Bucket { return h.buckets }
+
+func (b Bucket) UpperBound() float64 { return b.upperBound }
+func (b Bucket) CumulativeCount() float64 { return b.cumulativeCount }
diff --git a/src/go/collectors/go.d.plugin/pkg/prometheus/metric_family_test.go b/src/go/collectors/go.d.plugin/pkg/prometheus/metric_family_test.go
new file mode 100644
index 000000000..f373996da
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/prometheus/metric_family_test.go
@@ -0,0 +1,356 @@
+package prometheus
+
+import (
+ "testing"
+
+ "github.com/prometheus/common/model"
+ "github.com/prometheus/prometheus/model/labels"
+ "github.com/stretchr/testify/assert"
+)
+
+func TestMetricFamilies_Len(t *testing.T) {
+ tests := map[string]struct {
+ mfs MetricFamilies
+ wantLen int
+ }{
+ "initialized with two elements": {
+ mfs: MetricFamilies{"1": nil, "2": nil},
+ wantLen: 2,
+ },
+ "not initialized": {
+ mfs: nil,
+ wantLen: 0,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ assert.Equal(t, test.mfs.Len(), test.wantLen)
+ })
+ }
+}
+
+// TestMetricFamilies_Get verifies name-based lookup, including the nil-map case.
+func TestMetricFamilies_Get(t *testing.T) {
+	const n = "metric"
+
+	tests := map[string]struct {
+		mfs    MetricFamilies
+		wantMF *MetricFamily
+	}{
+		"metric is found": { // fixed typo in the case name (was "etric is found")
+			mfs:    MetricFamilies{n: &MetricFamily{name: n}},
+			wantMF: &MetricFamily{name: n},
+		},
+		"metric is not found": {
+			mfs:    MetricFamilies{"!" + n: &MetricFamily{name: n}},
+			wantMF: nil,
+		},
+		"not initialized": {
+			mfs:    nil,
+			wantMF: nil,
+		},
+	}
+
+	for name, test := range tests {
+		t.Run(name, func(t *testing.T) {
+			assert.Equal(t, test.mfs.Get(n), test.wantMF)
+		})
+	}
+}
+
+func TestMetricFamilies_GetGauge(t *testing.T) {
+ const n = "metric"
+
+ tests := map[string]struct {
+ mfs MetricFamilies
+ wantMF *MetricFamily
+ }{
+ "metric is found and is Gauge": {
+ mfs: MetricFamilies{n: &MetricFamily{name: n, typ: model.MetricTypeGauge}},
+ wantMF: &MetricFamily{name: n, typ: model.MetricTypeGauge},
+ },
+ "metric is found but it is not Gauge": {
+ mfs: MetricFamilies{n: &MetricFamily{name: n, typ: model.MetricTypeUnknown}},
+ wantMF: nil,
+ },
+ "metric is not found": {
+ mfs: MetricFamilies{"!" + n: &MetricFamily{name: n, typ: model.MetricTypeGauge}},
+ wantMF: nil,
+ },
+ "not initialized": {
+ mfs: nil,
+ wantMF: nil,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ assert.Equal(t, test.mfs.GetGauge(n), test.wantMF)
+ })
+ }
+}
+
+func TestMetricFamilies_GetCounter(t *testing.T) {
+ const n = "metric"
+
+ tests := map[string]struct {
+ mfs MetricFamilies
+ wantMF *MetricFamily
+ }{
+ "metric is found and is Counter": {
+ mfs: MetricFamilies{n: &MetricFamily{name: n, typ: model.MetricTypeCounter}},
+ wantMF: &MetricFamily{name: n, typ: model.MetricTypeCounter},
+ },
+ "metric is found but it is not Counter": {
+ mfs: MetricFamilies{n: &MetricFamily{name: n, typ: model.MetricTypeGauge}},
+ wantMF: nil,
+ },
+ "metric is not found": {
+ mfs: MetricFamilies{"!" + n: &MetricFamily{name: n, typ: model.MetricTypeGauge}},
+ wantMF: nil,
+ },
+ "not initialized": {
+ mfs: nil,
+ wantMF: nil,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ assert.Equal(t, test.mfs.GetCounter(n), test.wantMF)
+ })
+ }
+}
+
+func TestMetricFamilies_GetSummary(t *testing.T) {
+ const n = "metric"
+
+ tests := map[string]struct {
+ mfs MetricFamilies
+ wantMF *MetricFamily
+ }{
+ "metric is found and is Summary": {
+ mfs: MetricFamilies{n: &MetricFamily{name: n, typ: model.MetricTypeSummary}},
+ wantMF: &MetricFamily{name: n, typ: model.MetricTypeSummary},
+ },
+ "metric is found but it is not Summary": {
+ mfs: MetricFamilies{n: &MetricFamily{name: n, typ: model.MetricTypeGauge}},
+ wantMF: nil,
+ },
+ "metric is not found": {
+ mfs: MetricFamilies{"!" + n: &MetricFamily{name: n, typ: model.MetricTypeGauge}},
+ wantMF: nil,
+ },
+ "not initialized": {
+ mfs: nil,
+ wantMF: nil,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ assert.Equal(t, test.mfs.GetSummary(n), test.wantMF)
+ })
+ }
+}
+
+func TestMetricFamilies_GetHistogram(t *testing.T) {
+ const n = "metric"
+
+ tests := map[string]struct {
+ mfs MetricFamilies
+ wantMF *MetricFamily
+ }{
+ "metric is found and is Histogram": {
+ mfs: MetricFamilies{n: &MetricFamily{name: n, typ: model.MetricTypeHistogram}},
+ wantMF: &MetricFamily{name: n, typ: model.MetricTypeHistogram},
+ },
+ "metric is found but it is not Histogram": {
+ mfs: MetricFamilies{n: &MetricFamily{name: n, typ: model.MetricTypeGauge}},
+ wantMF: nil,
+ },
+ "metric is not found": {
+ mfs: MetricFamilies{"!" + n: &MetricFamily{name: n, typ: model.MetricTypeGauge}},
+ wantMF: nil,
+ },
+ "not initialized": {
+ mfs: nil,
+ wantMF: nil,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ assert.Equal(t, test.mfs.GetHistogram(n), test.wantMF)
+ })
+ }
+}
+
+func TestMetricFamily_Name(t *testing.T) {
+ mf := &MetricFamily{name: "name"}
+ assert.Equal(t, mf.Name(), "name")
+}
+
+func TestMetricFamily_Type(t *testing.T) {
+ mf := &MetricFamily{typ: model.MetricTypeGauge}
+ assert.Equal(t, mf.Type(), model.MetricTypeGauge)
+}
+
+func TestMetricFamily_Help(t *testing.T) {
+ mf := &MetricFamily{help: "help"}
+ assert.Equal(t, mf.Help(), "help")
+}
+
+func TestMetricFamily_Metrics(t *testing.T) {
+ metrics := []Metric{{gauge: &Gauge{value: 1}, counter: &Counter{value: 1}}}
+ mf := &MetricFamily{metrics: metrics}
+ assert.Equal(t, mf.Metrics(), metrics)
+}
+
+func TestMetric_Labels(t *testing.T) {
+ lbs := labels.Labels{{Name: "1", Value: "1"}, {Name: "2", Value: "2"}}
+ m := &Metric{labels: lbs}
+ assert.Equal(t, m.Labels(), lbs)
+}
+
+func TestMetric_Gauge(t *testing.T) {
+ tests := map[string]struct {
+ m *Metric
+ want *Gauge
+ }{
+ "gauge set": {
+ m: &Metric{gauge: &Gauge{value: 1}},
+ want: &Gauge{value: 1},
+ },
+ "gauge not set": {
+ m: &Metric{},
+ want: nil,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ assert.Equal(t, test.m.Gauge(), test.want)
+ })
+ }
+}
+
+func TestMetric_Counter(t *testing.T) {
+ tests := map[string]struct {
+ m *Metric
+ want *Counter
+ }{
+ "counter set": {
+ m: &Metric{counter: &Counter{value: 1}},
+ want: &Counter{value: 1},
+ },
+ "counter not set": {
+ m: &Metric{},
+ want: nil,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ assert.Equal(t, test.m.Counter(), test.want)
+ })
+ }
+}
+
+func TestMetric_Summary(t *testing.T) {
+ tests := map[string]struct {
+ m *Metric
+ want *Summary
+ }{
+ "summary set": {
+ m: &Metric{summary: &Summary{sum: 0.1, count: 3}},
+ want: &Summary{sum: 0.1, count: 3},
+ },
+ "summary not set": {
+ m: &Metric{},
+ want: nil,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ assert.Equal(t, test.m.Summary(), test.want)
+ })
+ }
+}
+
+func TestMetric_Histogram(t *testing.T) {
+ tests := map[string]struct {
+ m *Metric
+ want *Histogram
+ }{
+ "histogram set": {
+ m: &Metric{histogram: &Histogram{sum: 0.1, count: 3}},
+ want: &Histogram{sum: 0.1, count: 3},
+ },
+ "histogram not set": {
+ m: &Metric{},
+ want: nil,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ assert.Equal(t, test.m.Histogram(), test.want)
+ })
+ }
+}
+
+func TestGauge_Value(t *testing.T) {
+ assert.Equal(t, Gauge{value: 1}.Value(), 1.0)
+}
+
+func TestCounter_Value(t *testing.T) {
+ assert.Equal(t, Counter{value: 1}.Value(), 1.0)
+}
+
+func TestSummary_Sum(t *testing.T) {
+ assert.Equal(t, Summary{sum: 1}.Sum(), 1.0)
+}
+
+func TestSummary_Count(t *testing.T) {
+ assert.Equal(t, Summary{count: 1}.Count(), 1.0)
+}
+
+func TestSummary_Quantiles(t *testing.T) {
+ assert.Equal(t,
+ Summary{quantiles: []Quantile{{quantile: 0.1, value: 1}}}.Quantiles(),
+ []Quantile{{quantile: 0.1, value: 1}},
+ )
+}
+
+func TestQuantile_Value(t *testing.T) {
+ assert.Equal(t, Quantile{value: 1}.Value(), 1.0)
+}
+
+func TestQuantile_Quantile(t *testing.T) {
+ assert.Equal(t, Quantile{quantile: 0.1}.Quantile(), 0.1)
+}
+
+func TestHistogram_Sum(t *testing.T) {
+ assert.Equal(t, Histogram{sum: 1}.Sum(), 1.0)
+}
+
+func TestHistogram_Count(t *testing.T) {
+ assert.Equal(t, Histogram{count: 1}.Count(), 1.0)
+}
+
+func TestHistogram_Buckets(t *testing.T) {
+ assert.Equal(t,
+ Histogram{buckets: []Bucket{{upperBound: 0.1, cumulativeCount: 1}}}.Buckets(),
+ []Bucket{{upperBound: 0.1, cumulativeCount: 1}},
+ )
+}
+
+func TestBucket_UpperBound(t *testing.T) {
+ assert.Equal(t, Bucket{upperBound: 0.1}.UpperBound(), 0.1)
+}
+
+func TestBucket_CumulativeCount(t *testing.T) {
+ assert.Equal(t, Bucket{cumulativeCount: 1}.CumulativeCount(), 1.0)
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/prometheus/metric_series.go b/src/go/collectors/go.d.plugin/pkg/prometheus/metric_series.go
new file mode 100644
index 000000000..31914f4b2
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/prometheus/metric_series.go
@@ -0,0 +1,110 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package prometheus
+
+import (
+ "sort"
+
+ "github.com/prometheus/prometheus/model/labels"
+)
+
+type (
+	// SeriesSample is a pair of label set and value
+	SeriesSample struct {
+		Labels labels.Labels
+		Value float64
+	}
+
+	// Series is a list of SeriesSample
+	Series []SeriesSample
+)
+
+// Name the __name__ label value
+// NOTE(review): this indexes Labels[0] directly, assuming the __name__
+// label always comes first — true for samples built by this package's
+// parser; confirm for externally constructed samples.
+func (s SeriesSample) Name() string {
+	return s.Labels[0].Value
+}
+
+// Add appends a metric.
+func (s *Series) Add(kv SeriesSample) {
+	*s = append(*s, kv)
+}
+
+// Reset resets the buffer to be empty,
+// but it retains the underlying storage for use by future writes.
+func (s *Series) Reset() {
+	*s = (*s)[:0]
+}
+
+// Sort sorts data.
+// Ordering is by metric name (see Less); FindByName/FindByNames require it.
+func (s Series) Sort() {
+	sort.Sort(s)
+}
+
+// Len returns metric length.
+func (s Series) Len() int {
+	return len(s)
+}
+
+// Less reports whether the element with
+// index i should sort before the element with index j.
+// Only the __name__ label is compared.
+func (s Series) Less(i, j int) bool {
+	return s[i].Name() < s[j].Name()
+}
+
+// Swap swaps the elements with indexes i and j.
+func (s Series) Swap(i, j int) {
+	s[i], s[j] = s[j], s[i]
+}
+
+// FindByName finds metrics where it's __name__ label matches given name.
+// It expects the metrics is sorted.
+// Complexity: O(log(N))
+func (s Series) FindByName(name string) Series {
+	// binary search for the first sample with Name() >= name
+	from := sort.Search(len(s), func(i int) bool {
+		return s[i].Name() >= name
+	})
+	if from == len(s) || s[from].Name() != name { // not found
+		return Series{}
+	}
+	// extend to the end of the run of samples with this name
+	until := from + 1
+	for until < len(s) && s[until].Name() == name {
+		until++
+	}
+	return s[from:until] // a view into s, not a copy
+}
+
+// FindByNames returns every sample whose __name__ label matches any of the
+// given names. The series must be sorted (see Sort).
+// Complexity: one O(log(N)) search per name.
+func (s Series) FindByNames(names ...string) Series {
+	if len(names) == 0 {
+		return Series{}
+	}
+	if len(names) == 1 {
+		return s.FindByName(names[0])
+	}
+	var result Series
+	for _, name := range names {
+		result = append(result, s.FindByName(name)...)
+	}
+	return result
+}
+
+// Max returns the largest sample value, or 0 for an empty series.
+// It does NOT require the series to be sorted. Complexity: O(N).
+func (s Series) Max() float64 {
+	if len(s) == 0 {
+		return 0
+	}
+	maxVal := s[0].Value
+	for _, sample := range s[1:] {
+		if sample.Value > maxVal {
+			maxVal = sample.Value
+		}
+	}
+	return maxVal
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/prometheus/metric_series_test.go b/src/go/collectors/go.d.plugin/pkg/prometheus/metric_series_test.go
new file mode 100644
index 000000000..80c805474
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/prometheus/metric_series_test.go
@@ -0,0 +1,140 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package prometheus
+
+import (
+ "testing"
+
+ "github.com/prometheus/prometheus/model/labels"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+// TODO: write better tests
+
+const (
+ testName1 = "logback_events_total"
+ testName2 = "jvm_threads_peak"
+)
+
+func newTestSeries() Series {
+ return Series{
+ {
+ Value: 10,
+ Labels: labels.Labels{
+ {Name: "__name__", Value: testName1},
+ {Name: "level", Value: "error"},
+ },
+ },
+ {
+ Value: 20,
+ Labels: labels.Labels{
+ {Name: "__name__", Value: testName1},
+ {Name: "level", Value: "warn"},
+ },
+ },
+ {
+ Value: 5,
+ Labels: labels.Labels{
+ {Name: "__name__", Value: testName1},
+ {Name: "level", Value: "info"},
+ },
+ },
+ {
+ Value: 15,
+ Labels: labels.Labels{
+ {Name: "__name__", Value: testName1},
+ {Name: "level", Value: "debug"},
+ },
+ },
+ {
+ Value: 26,
+ Labels: labels.Labels{
+ {Name: "__name__", Value: testName2},
+ },
+ },
+ }
+}
+
+func TestSeries_Name(t *testing.T) {
+ m := newTestSeries()
+
+ assert.Equal(t, testName1, m[0].Name())
+ assert.Equal(t, testName1, m[1].Name())
+
+}
+
+func TestSeries_Add(t *testing.T) {
+ m := newTestSeries()
+
+ require.Len(t, m, 5)
+ m.Add(SeriesSample{})
+ assert.Len(t, m, 6)
+}
+
+func TestSeries_FindByName(t *testing.T) {
+ m := newTestSeries()
+ m.Sort()
+ assert.Len(t, Series{}.FindByName(testName1), 0)
+ assert.Len(t, m.FindByName(testName1), len(m)-1)
+}
+
+func TestSeries_FindByNames(t *testing.T) {
+ m := newTestSeries()
+ m.Sort()
+ assert.Len(t, m.FindByNames(), 0)
+ assert.Len(t, m.FindByNames(testName1), len(m)-1)
+ assert.Len(t, m.FindByNames(testName1, testName2), len(m))
+}
+
+func TestSeries_Len(t *testing.T) {
+ m := newTestSeries()
+
+ assert.Equal(t, len(m), m.Len())
+}
+
+func TestSeries_Less(t *testing.T) {
+ m := newTestSeries()
+
+ assert.False(t, m.Less(0, 1))
+ assert.True(t, m.Less(4, 0))
+}
+
+func TestSeries_Max(t *testing.T) {
+ m := newTestSeries()
+
+ assert.Equal(t, float64(26), m.Max())
+
+}
+
+func TestSeries_Reset(t *testing.T) {
+ m := newTestSeries()
+ m.Reset()
+
+ assert.Len(t, m, 0)
+
+}
+
+func TestSeries_Sort(t *testing.T) {
+ {
+ m := newTestSeries()
+ m.Sort()
+ assert.Equal(t, testName2, m[0].Name())
+ }
+ {
+ m := Series{}
+ assert.Equal(t, 0.0, m.Max())
+ }
+}
+
+func TestSeries_Swap(t *testing.T) {
+ m := newTestSeries()
+
+ m0 := m[0]
+ m1 := m[1]
+
+ m.Swap(0, 1)
+
+ assert.Equal(t, m0, m[1])
+ assert.Equal(t, m1, m[0])
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/prometheus/parse.go b/src/go/collectors/go.d.plugin/pkg/prometheus/parse.go
new file mode 100644
index 000000000..db17cb83b
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/prometheus/parse.go
@@ -0,0 +1,413 @@
+package prometheus
+
+import (
+ "errors"
+ "io"
+ "regexp"
+ "strconv"
+ "strings"
+
+ "github.com/netdata/netdata/go/go.d.plugin/pkg/prometheus/selector"
+
+ "github.com/prometheus/common/model"
+ "github.com/prometheus/prometheus/model/labels"
+ "github.com/prometheus/prometheus/model/textparse"
+)
+
+const (
+ quantileLabel = "quantile"
+ bucketLabel = "le"
+)
+
+const (
+ countSuffix = "_count"
+ sumSuffix = "_sum"
+ bucketSuffix = "_bucket"
+)
+
+type promTextParser struct {
+ metrics MetricFamilies
+ series Series
+
+ sr selector.Selector
+
+ currMF *MetricFamily
+ currSeries labels.Labels
+
+ summaries map[uint64]*Summary
+ histograms map[uint64]*Histogram
+
+ isCount bool
+ isSum bool
+ isQuantile bool
+ isBucket bool
+
+ currQuantile float64
+ currBucket float64
+}
+
+func (p *promTextParser) parseToSeries(text []byte) (Series, error) {
+ p.series.Reset()
+
+ parser := textparse.NewPromParser(text)
+ for {
+ entry, err := parser.Next()
+ if err != nil {
+ if errors.Is(err, io.EOF) {
+ break
+ }
+ if entry == textparse.EntryInvalid && strings.HasPrefix(err.Error(), "invalid metric type") {
+ continue
+ }
+ return nil, err
+ }
+
+ switch entry {
+ case textparse.EntrySeries:
+ p.currSeries = p.currSeries[:0]
+
+ parser.Metric(&p.currSeries)
+
+ if p.sr != nil && !p.sr.Matches(p.currSeries) {
+ continue
+ }
+
+ _, _, val := parser.Series()
+ p.series.Add(SeriesSample{Labels: copyLabels(p.currSeries), Value: val})
+ }
+ }
+
+ p.series.Sort()
+
+ return p.series, nil
+}
+
+var reSpace = regexp.MustCompile(`\s+`)
+
// parseToMetricFamilies parses Prometheus text exposition format into
// metric families grouped by name, using HELP/TYPE metadata when present
// and suffix/label heuristics (see setMetricFamilyBySeries) otherwise.
// The returned MetricFamilies is owned by the parser and reused between
// calls; callers must not retain it across invocations.
func (p *promTextParser) parseToMetricFamilies(text []byte) (MetricFamilies, error) {
	p.reset()

	parser := textparse.NewPromParser(text)
	for {
		entry, err := parser.Next()
		if err != nil {
			if errors.Is(err, io.EOF) {
				break
			}
			// Series with an unsupported TYPE are skipped instead of
			// failing the whole scrape.
			if entry == textparse.EntryInvalid && strings.HasPrefix(err.Error(), "invalid metric type") {
				continue
			}
			return nil, err
		}

		switch entry {
		case textparse.EntryHelp:
			name, help := parser.Help()
			p.setMetricFamilyByName(string(name))
			p.currMF.help = string(help)
			if strings.IndexByte(p.currMF.help, '\n') != -1 {
				// convert multiline to one line because HELP is used as the chart title.
				p.currMF.help = reSpace.ReplaceAllString(strings.TrimSpace(p.currMF.help), " ")
			}
		case textparse.EntryType:
			name, typ := parser.Type()
			p.setMetricFamilyByName(string(name))
			p.currMF.typ = typ
		case textparse.EntrySeries:
			// Reuse the scratch label slice between series to limit allocations.
			p.currSeries = p.currSeries[:0]

			parser.Metric(&p.currSeries)

			if p.sr != nil && !p.sr.Matches(p.currSeries) {
				continue
			}

			p.setMetricFamilyBySeries()

			_, _, value := parser.Series()

			// Dispatch on the (possibly heuristic) family type.
			switch p.currMF.typ {
			case model.MetricTypeGauge:
				p.addGauge(value)
			case model.MetricTypeCounter:
				p.addCounter(value)
			case model.MetricTypeSummary:
				p.addSummary(value)
			case model.MetricTypeHistogram:
				p.addHistogram(value)
			case model.MetricTypeUnknown:
				p.addUnknown(value)
			}
		}
	}

	// Drop families that ended up with no samples (metadata-only entries,
	// or all series filtered out by the selector).
	for k, v := range p.metrics {
		if len(v.Metrics()) == 0 {
			delete(p.metrics, k)
		}
	}

	return p.metrics, nil
}
+
+func (p *promTextParser) setMetricFamilyByName(name string) {
+ mf, ok := p.metrics[name]
+ if !ok {
+ mf = &MetricFamily{name: name, typ: model.MetricTypeUnknown}
+ p.metrics[name] = mf
+ }
+ p.currMF = mf
+}
+
// setMetricFamilyBySeries selects (or creates) the metric family the
// current series belongs to, resolving summary/histogram child series
// (_sum, _count, _bucket, quantile-labeled) back to their parent family.
// It also sets the per-series flags (isSum/isCount/isQuantile/isBucket)
// and values (currQuantile/currBucket) consumed by addSummary/addHistogram.
func (p *promTextParser) setMetricFamilyBySeries() {
	p.isSum, p.isCount, p.isQuantile, p.isBucket = false, false, false, false
	p.currQuantile, p.currBucket = 0, 0

	// currSeries[0] is the "__name__" label (first in sorted label order).
	name := p.currSeries[0].Value

	if p.currMF != nil && p.currMF.name == name {
		// Fast path: consecutive series of the same family. Summary
		// quantile series carry the bare family name, so the quantile
		// label still has to be extracted here.
		if p.currMF.typ == model.MetricTypeSummary {
			p.setQuantile()
		}
		return
	}

	typ := model.MetricTypeUnknown

	switch {
	case strings.HasSuffix(name, sumSuffix):
		n := strings.TrimSuffix(name, sumSuffix)
		if mf, ok := p.metrics[n]; ok && isSummaryOrHistogram(mf.typ) {
			p.isSum = true
			// Rewrite __name__ to the parent family so label hashes match.
			p.currSeries[0].Value = n
			p.currMF = mf
			return
		}
	case strings.HasSuffix(name, countSuffix):
		n := strings.TrimSuffix(name, countSuffix)
		if mf, ok := p.metrics[n]; ok && isSummaryOrHistogram(mf.typ) {
			p.isCount = true
			p.currSeries[0].Value = n
			p.currMF = mf
			return
		}
	case strings.HasSuffix(name, bucketSuffix):
		n := strings.TrimSuffix(name, bucketSuffix)
		if mf, ok := p.metrics[n]; ok && isSummaryOrHistogram(mf.typ) {
			p.currSeries[0].Value = n
			p.setBucket()
			p.currMF = mf
			return
		}
		// No TYPE metadata for the parent: an "le" label implies a
		// histogram bucket series.
		if p.currSeries.Has(bucketLabel) {
			p.currSeries[0].Value = n
			p.setBucket()
			name = n
			typ = model.MetricTypeHistogram
		}
	case p.currSeries.Has(quantileLabel):
		// No TYPE metadata: a "quantile" label implies a summary.
		typ = model.MetricTypeSummary
		p.setQuantile()
	}

	p.setMetricFamilyByName(name)
	// Only upgrade the type; never overwrite one set via TYPE metadata.
	if p.currMF.typ == "" || p.currMF.typ == model.MetricTypeUnknown {
		p.currMF.typ = typ
	}
}
+
+func (p *promTextParser) setQuantile() {
+ if lbs, v, ok := removeLabel(p.currSeries, quantileLabel); ok {
+ p.isQuantile = true
+ p.currSeries = lbs
+ p.currQuantile, _ = strconv.ParseFloat(v, 64)
+ }
+}
+
+func (p *promTextParser) setBucket() {
+ if lbs, v, ok := removeLabel(p.currSeries, bucketLabel); ok {
+ p.isBucket = true
+ p.currSeries = lbs
+ p.currBucket, _ = strconv.ParseFloat(v, 64)
+ }
+}
+
// addGauge stores value as a gauge sample in the current family,
// reusing a previously allocated Metric slot when capacity allows.
func (p *promTextParser) addGauge(value float64) {
	p.currSeries = p.currSeries[1:] // remove "__name__"

	if v := len(p.currMF.metrics); v == cap(p.currMF.metrics) {
		// No spare capacity: append a fresh Metric with copied labels.
		p.currMF.metrics = append(p.currMF.metrics, Metric{
			labels: copyLabels(p.currSeries),
			gauge:  &Gauge{value: value},
		})
	} else {
		// Spare capacity left by reset(): reuse the slot at index v,
		// overwriting its value and labels in place.
		p.currMF.metrics = p.currMF.metrics[:v+1]
		if p.currMF.metrics[v].gauge == nil {
			p.currMF.metrics[v].gauge = &Gauge{}
		}
		p.currMF.metrics[v].gauge.value = value
		p.currMF.metrics[v].labels = p.currMF.metrics[v].labels[:0]
		p.currMF.metrics[v].labels = append(p.currMF.metrics[v].labels, p.currSeries...)
	}
}
+
// addCounter stores value as a counter sample in the current family,
// reusing a previously allocated Metric slot when capacity allows.
func (p *promTextParser) addCounter(value float64) {
	p.currSeries = p.currSeries[1:] // remove "__name__"

	if v := len(p.currMF.metrics); v == cap(p.currMF.metrics) {
		// No spare capacity: append a fresh Metric with copied labels.
		p.currMF.metrics = append(p.currMF.metrics, Metric{
			labels:  copyLabels(p.currSeries),
			counter: &Counter{value: value},
		})
	} else {
		// Spare capacity left by reset(): reuse the slot at index v,
		// overwriting its value and labels in place.
		p.currMF.metrics = p.currMF.metrics[:v+1]
		if p.currMF.metrics[v].counter == nil {
			p.currMF.metrics[v].counter = &Counter{}
		}
		p.currMF.metrics[v].counter.value = value
		p.currMF.metrics[v].labels = p.currMF.metrics[v].labels[:0]
		p.currMF.metrics[v].labels = append(p.currMF.metrics[v].labels, p.currSeries...)
	}
}
+
// addUnknown stores value as an untyped sample in the current family,
// reusing a previously allocated Metric slot when capacity allows.
func (p *promTextParser) addUnknown(value float64) {
	p.currSeries = p.currSeries[1:] // remove "__name__"

	if v := len(p.currMF.metrics); v == cap(p.currMF.metrics) {
		// No spare capacity: append a fresh Metric with copied labels.
		p.currMF.metrics = append(p.currMF.metrics, Metric{
			labels:  copyLabels(p.currSeries),
			untyped: &Untyped{value: value},
		})
	} else {
		// Spare capacity left by reset(): reuse the slot at index v,
		// overwriting its value and labels in place.
		p.currMF.metrics = p.currMF.metrics[:v+1]
		if p.currMF.metrics[v].untyped == nil {
			p.currMF.metrics[v].untyped = &Untyped{}
		}
		p.currMF.metrics[v].untyped.value = value
		p.currMF.metrics[v].labels = p.currMF.metrics[v].labels[:0]
		p.currMF.metrics[v].labels = append(p.currMF.metrics[v].labels, p.currSeries...)
	}
}
+
// addSummary folds the current child series (quantile / _sum / _count,
// as flagged by setMetricFamilyBySeries) into the Summary identified by
// the label-set hash, creating or reusing a Metric slot on first sight
// of that label set.
func (p *promTextParser) addSummary(value float64) {
	// Hash is taken while "__name__" (already rewritten to the parent
	// family name) is still present; quantile was stripped earlier, so
	// all child series of one label set share the same hash.
	hash := p.currSeries.Hash()

	p.currSeries = p.currSeries[1:] // remove "__name__"

	s, ok := p.summaries[hash]
	if !ok {
		if v := len(p.currMF.metrics); v == cap(p.currMF.metrics) {
			// No spare capacity: append a fresh Metric.
			s = &Summary{}
			p.currMF.metrics = append(p.currMF.metrics, Metric{
				labels:  copyLabels(p.currSeries),
				summary: s,
			})
		} else {
			// Reuse a slot left by reset(); clear stale summary state
			// and overwrite labels in place.
			p.currMF.metrics = p.currMF.metrics[:v+1]
			if p.currMF.metrics[v].summary == nil {
				p.currMF.metrics[v].summary = &Summary{}
			}
			p.currMF.metrics[v].summary.sum = 0
			p.currMF.metrics[v].summary.count = 0
			p.currMF.metrics[v].summary.quantiles = p.currMF.metrics[v].summary.quantiles[:0]
			p.currMF.metrics[v].labels = p.currMF.metrics[v].labels[:0]
			p.currMF.metrics[v].labels = append(p.currMF.metrics[v].labels, p.currSeries...)
			s = p.currMF.metrics[v].summary
		}

		p.summaries[hash] = s
	}

	// Route the sample into the right summary field based on which child
	// series this is.
	switch {
	case p.isQuantile:
		s.quantiles = append(s.quantiles, Quantile{quantile: p.currQuantile, value: value})
	case p.isSum:
		s.sum = value
	case p.isCount:
		s.count = value
	}
}
+
// addHistogram folds the current child series (_bucket / _sum / _count,
// as flagged by setMetricFamilyBySeries) into the Histogram identified
// by the label-set hash, creating or reusing a Metric slot on first
// sight of that label set.
func (p *promTextParser) addHistogram(value float64) {
	// Hash is taken while "__name__" (already rewritten to the parent
	// family name) is still present; "le" was stripped earlier, so all
	// child series of one label set share the same hash.
	hash := p.currSeries.Hash()

	p.currSeries = p.currSeries[1:] // remove "__name__"

	h, ok := p.histograms[hash]
	if !ok {
		if v := len(p.currMF.metrics); v == cap(p.currMF.metrics) {
			// No spare capacity: append a fresh Metric.
			h = &Histogram{}
			p.currMF.metrics = append(p.currMF.metrics, Metric{
				labels:    copyLabels(p.currSeries),
				histogram: h,
			})
		} else {
			// Reuse a slot left by reset(); clear stale histogram state
			// and overwrite labels in place.
			p.currMF.metrics = p.currMF.metrics[:v+1]
			if p.currMF.metrics[v].histogram == nil {
				p.currMF.metrics[v].histogram = &Histogram{}
			}
			p.currMF.metrics[v].histogram.sum = 0
			p.currMF.metrics[v].histogram.count = 0
			p.currMF.metrics[v].histogram.buckets = p.currMF.metrics[v].histogram.buckets[:0]
			p.currMF.metrics[v].labels = p.currMF.metrics[v].labels[:0]
			p.currMF.metrics[v].labels = append(p.currMF.metrics[v].labels, p.currSeries...)
			h = p.currMF.metrics[v].histogram
		}

		p.histograms[hash] = h
	}

	// Route the sample into the right histogram field based on which
	// child series this is.
	switch {
	case p.isBucket:
		h.buckets = append(h.buckets, Bucket{upperBound: p.currBucket, cumulativeCount: value})
	case p.isSum:
		h.sum = value
	case p.isCount:
		h.count = value
	}
}
+
+func (p *promTextParser) reset() {
+ p.currMF = nil
+ p.currSeries = p.currSeries[:0]
+
+ if p.metrics == nil {
+ p.metrics = make(MetricFamilies)
+ }
+ for _, mf := range p.metrics {
+ mf.help = ""
+ mf.typ = ""
+ mf.metrics = mf.metrics[:0]
+ }
+
+ if p.summaries == nil {
+ p.summaries = make(map[uint64]*Summary)
+ }
+ for k := range p.summaries {
+ delete(p.summaries, k)
+ }
+
+ if p.histograms == nil {
+ p.histograms = make(map[uint64]*Histogram)
+ }
+ for k := range p.histograms {
+ delete(p.histograms, k)
+ }
+}
+
// copyLabels returns a detached copy of lbs so that later reuse of the
// parser's scratch slice cannot alias labels stored in metrics.
// Note: for an empty input this returns nil (append onto a nil slice),
// not an empty non-nil slice.
func copyLabels(lbs []labels.Label) []labels.Label {
	return append([]labels.Label(nil), lbs...)
}
+
// removeLabel deletes the first label named name from lbs and returns
// the shortened slice, the removed label's value, and whether a removal
// happened. The removal is done IN PLACE: the backing array of lbs is
// shifted, so callers must treat the original slice as invalidated and
// use the returned one (as setQuantile/setBucket do).
func removeLabel(lbs labels.Labels, name string) (labels.Labels, string, bool) {
	for i, v := range lbs {
		if v.Name == name {
			return append(lbs[:i], lbs[i+1:]...), v.Value, true
		}
	}
	return lbs, "", false
}
+
+func isSummaryOrHistogram(typ model.MetricType) bool {
+ return typ == model.MetricTypeSummary || typ == model.MetricTypeHistogram
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/prometheus/parse_test.go b/src/go/collectors/go.d.plugin/pkg/prometheus/parse_test.go
new file mode 100644
index 000000000..453011c07
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/prometheus/parse_test.go
@@ -0,0 +1,1675 @@
+package prometheus
+
+import (
+ "bytes"
+ "fmt"
+ "math"
+ "os"
+ "testing"
+
+ "github.com/netdata/netdata/go/go.d.plugin/pkg/prometheus/selector"
+
+ "github.com/prometheus/common/model"
+ "github.com/prometheus/prometheus/model/labels"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
var (
	// Testdata fixtures for the parser tests. Read errors are ignored
	// here on purpose; Test_testParseDataIsValid fails the suite if any
	// fixture came back nil.
	dataMultilineHelp, _ = os.ReadFile("testdata/multiline-help.txt")

	dataGaugeMeta, _ = os.ReadFile("testdata/gauge-meta.txt")
	dataGaugeNoMeta, _ = os.ReadFile("testdata/gauge-no-meta.txt")
	dataCounterMeta, _ = os.ReadFile("testdata/counter-meta.txt")
	dataCounterNoMeta, _ = os.ReadFile("testdata/counter-no-meta.txt")
	dataSummaryMeta, _ = os.ReadFile("testdata/summary-meta.txt")
	dataSummaryNoMeta, _ = os.ReadFile("testdata/summary-no-meta.txt")
	dataHistogramMeta, _ = os.ReadFile("testdata/histogram-meta.txt")
	dataHistogramNoMeta, _ = os.ReadFile("testdata/histogram-no-meta.txt")
	// dataAllTypes concatenates every per-type fixture for the combined case.
	dataAllTypes = joinData(
		dataGaugeMeta, dataGaugeNoMeta, dataCounterMeta, dataCounterNoMeta,
		dataSummaryMeta, dataSummaryNoMeta, dataHistogramMeta, dataHistogramNoMeta,
	)
)
+
+func Test_testParseDataIsValid(t *testing.T) {
+ for name, data := range map[string][]byte{
+ "dataMultilineHelp": dataMultilineHelp,
+ "dataGaugeMeta": dataGaugeMeta,
+ "dataGaugeNoMeta": dataGaugeNoMeta,
+ "dataCounterMeta": dataCounterMeta,
+ "dataCounterNoMeta": dataCounterNoMeta,
+ "dataSummaryMeta": dataSummaryMeta,
+ "dataSummaryNoMeta": dataSummaryNoMeta,
+ "dataHistogramMeta": dataHistogramMeta,
+ "dataHistogramNoMeta": dataHistogramNoMeta,
+ "dataAllTypes": dataAllTypes,
+ } {
+ require.NotNilf(t, data, name)
+ }
+}
+
+func TestPromTextParser_parseToMetricFamilies(t *testing.T) {
+ tests := map[string]struct {
+ input []byte
+ want MetricFamilies
+ }{
+ "Gauge with multiline HELP": {
+ input: dataMultilineHelp,
+ want: MetricFamilies{
+ "test_gauge_metric_1": {
+ name: "test_gauge_metric_1",
+ help: "First line. Second line.",
+ typ: model.MetricTypeGauge,
+ metrics: []Metric{
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value1"}},
+ gauge: &Gauge{value: 11},
+ },
+ },
+ },
+ },
+ },
+ "Gauge with meta parsed as Gauge": {
+ input: dataGaugeMeta,
+ want: MetricFamilies{
+ "test_gauge_metric_1": {
+ name: "test_gauge_metric_1",
+ help: "Test Gauge Metric 1",
+ typ: model.MetricTypeGauge,
+ metrics: []Metric{
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value1"}},
+ gauge: &Gauge{value: 11},
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value2"}},
+ gauge: &Gauge{value: 12},
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value3"}},
+ gauge: &Gauge{value: 13},
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value4"}},
+ gauge: &Gauge{value: 14},
+ },
+ },
+ },
+ "test_gauge_metric_2": {
+ name: "test_gauge_metric_2",
+ typ: model.MetricTypeGauge,
+ metrics: []Metric{
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value1"}},
+ gauge: &Gauge{value: 11},
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value2"}},
+ gauge: &Gauge{value: 12},
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value3"}},
+ gauge: &Gauge{value: 13},
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value4"}},
+ gauge: &Gauge{value: 14},
+ },
+ },
+ },
+ },
+ },
+ "Counter with meta parsed as Counter": {
+ input: dataCounterMeta,
+ want: MetricFamilies{
+ "test_counter_metric_1_total": {
+ name: "test_counter_metric_1_total",
+ help: "Test Counter Metric 1",
+ typ: model.MetricTypeCounter,
+ metrics: []Metric{
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value1"}},
+ counter: &Counter{value: 11},
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value2"}},
+ counter: &Counter{value: 12},
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value3"}},
+ counter: &Counter{value: 13},
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value4"}},
+ counter: &Counter{value: 14},
+ },
+ },
+ },
+ "test_counter_metric_2_total": {
+ name: "test_counter_metric_2_total",
+ typ: model.MetricTypeCounter,
+ metrics: []Metric{
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value1"}},
+ counter: &Counter{value: 11},
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value2"}},
+ counter: &Counter{value: 12},
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value3"}},
+ counter: &Counter{value: 13},
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value4"}},
+ counter: &Counter{value: 14},
+ },
+ },
+ },
+ },
+ },
+ "Summary with meta parsed as Summary": {
+ input: dataSummaryMeta,
+ want: MetricFamilies{
+ "test_summary_1_duration_microseconds": {
+ name: "test_summary_1_duration_microseconds",
+ help: "Test Summary Metric 1",
+ typ: model.MetricTypeSummary,
+ metrics: []Metric{
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value1"}},
+ summary: &Summary{
+ sum: 283201.29,
+ count: 31,
+ quantiles: []Quantile{
+ {quantile: 0.5, value: 4931.921},
+ {quantile: 0.9, value: 4932.921},
+ {quantile: 0.99, value: 4933.921},
+ },
+ },
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value2"}},
+ summary: &Summary{
+ sum: 283201.29,
+ count: 31,
+ quantiles: []Quantile{
+ {quantile: 0.5, value: 4931.921},
+ {quantile: 0.9, value: 4932.921},
+ {quantile: 0.99, value: 4933.921},
+ },
+ },
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value3"}},
+ summary: &Summary{
+ sum: 283201.29,
+ count: 31,
+ quantiles: []Quantile{
+ {quantile: 0.5, value: 4931.921},
+ {quantile: 0.9, value: 4932.921},
+ {quantile: 0.99, value: 4933.921},
+ },
+ },
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value4"}},
+ summary: &Summary{
+ sum: 283201.29,
+ count: 31,
+ quantiles: []Quantile{
+ {quantile: 0.5, value: 4931.921},
+ {quantile: 0.9, value: 4932.921},
+ {quantile: 0.99, value: 4933.921},
+ },
+ },
+ },
+ },
+ },
+ "test_summary_2_duration_microseconds": {
+ name: "test_summary_2_duration_microseconds",
+ typ: model.MetricTypeSummary,
+ metrics: []Metric{
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value1"}},
+ summary: &Summary{
+ sum: 383201.29,
+ count: 41,
+ quantiles: []Quantile{
+ {quantile: 0.5, value: 5931.921},
+ {quantile: 0.9, value: 5932.921},
+ {quantile: 0.99, value: 5933.921},
+ },
+ },
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value2"}},
+ summary: &Summary{
+ sum: 383201.29,
+ count: 41,
+ quantiles: []Quantile{
+ {quantile: 0.5, value: 5931.921},
+ {quantile: 0.9, value: 5932.921},
+ {quantile: 0.99, value: 5933.921},
+ },
+ },
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value3"}},
+ summary: &Summary{
+ sum: 383201.29,
+ count: 41,
+ quantiles: []Quantile{
+ {quantile: 0.5, value: 5931.921},
+ {quantile: 0.9, value: 5932.921},
+ {quantile: 0.99, value: 5933.921},
+ },
+ },
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value4"}},
+ summary: &Summary{
+ sum: 383201.29,
+ count: 41,
+ quantiles: []Quantile{
+ {quantile: 0.5, value: 5931.921},
+ {quantile: 0.9, value: 5932.921},
+ {quantile: 0.99, value: 5933.921},
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ "Histogram with meta parsed as Histogram": {
+ input: dataHistogramMeta,
+ want: MetricFamilies{
+ "test_histogram_1_duration_seconds": {
+ name: "test_histogram_1_duration_seconds",
+ help: "Test Histogram Metric 1",
+ typ: model.MetricTypeHistogram,
+ metrics: []Metric{
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value1"}},
+ histogram: &Histogram{
+ sum: 0.00147889,
+ count: 6,
+ buckets: []Bucket{
+ {upperBound: 0.1, cumulativeCount: 4},
+ {upperBound: 0.5, cumulativeCount: 5},
+ {upperBound: math.Inf(1), cumulativeCount: 6},
+ },
+ },
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value2"}},
+ histogram: &Histogram{
+ sum: 0.00147889,
+ count: 6,
+ buckets: []Bucket{
+ {upperBound: 0.1, cumulativeCount: 4},
+ {upperBound: 0.5, cumulativeCount: 5},
+ {upperBound: math.Inf(1), cumulativeCount: 6},
+ },
+ },
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value3"}},
+ histogram: &Histogram{
+ sum: 0.00147889,
+ count: 6,
+ buckets: []Bucket{
+ {upperBound: 0.1, cumulativeCount: 4},
+ {upperBound: 0.5, cumulativeCount: 5},
+ {upperBound: math.Inf(1), cumulativeCount: 6},
+ },
+ },
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value4"}},
+ histogram: &Histogram{
+ sum: 0.00147889,
+ count: 6,
+ buckets: []Bucket{
+ {upperBound: 0.1, cumulativeCount: 4},
+ {upperBound: 0.5, cumulativeCount: 5},
+ {upperBound: math.Inf(1), cumulativeCount: 6},
+ },
+ },
+ },
+ },
+ },
+ "test_histogram_2_duration_seconds": {
+ name: "test_histogram_2_duration_seconds",
+ typ: model.MetricTypeHistogram,
+ metrics: []Metric{
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value1"}},
+ histogram: &Histogram{
+ sum: 0.00247889,
+ count: 9,
+ buckets: []Bucket{
+ {upperBound: 0.1, cumulativeCount: 7},
+ {upperBound: 0.5, cumulativeCount: 8},
+ {upperBound: math.Inf(1), cumulativeCount: 9},
+ },
+ },
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value2"}},
+ histogram: &Histogram{
+ sum: 0.00247889,
+ count: 9,
+ buckets: []Bucket{
+ {upperBound: 0.1, cumulativeCount: 7},
+ {upperBound: 0.5, cumulativeCount: 8},
+ {upperBound: math.Inf(1), cumulativeCount: 9},
+ },
+ },
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value3"}},
+ histogram: &Histogram{
+ sum: 0.00247889,
+ count: 9,
+ buckets: []Bucket{
+ {upperBound: 0.1, cumulativeCount: 7},
+ {upperBound: 0.5, cumulativeCount: 8},
+ {upperBound: math.Inf(1), cumulativeCount: 9},
+ },
+ },
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value4"}},
+ histogram: &Histogram{
+ sum: 0.00247889,
+ count: 9,
+ buckets: []Bucket{
+ {upperBound: 0.1, cumulativeCount: 7},
+ {upperBound: 0.5, cumulativeCount: 8},
+ {upperBound: math.Inf(1), cumulativeCount: 9},
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ "Gauge no meta parsed as Untyped": {
+ input: dataGaugeNoMeta,
+ want: MetricFamilies{
+ "test_gauge_no_meta_metric_1": {
+ name: "test_gauge_no_meta_metric_1",
+ typ: model.MetricTypeUnknown,
+ metrics: []Metric{
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value1"}},
+ untyped: &Untyped{value: 11},
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value2"}},
+ untyped: &Untyped{value: 12},
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value3"}},
+ untyped: &Untyped{value: 13},
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value4"}},
+ untyped: &Untyped{value: 14},
+ },
+ },
+ },
+ "test_gauge_no_meta_metric_2": {
+ name: "test_gauge_no_meta_metric_2",
+ typ: model.MetricTypeUnknown,
+ metrics: []Metric{
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value1"}},
+ untyped: &Untyped{value: 11},
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value2"}},
+ untyped: &Untyped{value: 12},
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value3"}},
+ untyped: &Untyped{value: 13},
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value4"}},
+ untyped: &Untyped{value: 14},
+ },
+ },
+ },
+ },
+ },
+ "Counter no meta parsed as Untyped": {
+ input: dataCounterNoMeta,
+ want: MetricFamilies{
+ "test_counter_no_meta_metric_1_total": {
+ name: "test_counter_no_meta_metric_1_total",
+ typ: model.MetricTypeUnknown,
+ metrics: []Metric{
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value1"}},
+ untyped: &Untyped{value: 11},
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value2"}},
+ untyped: &Untyped{value: 12},
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value3"}},
+ untyped: &Untyped{value: 13},
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value4"}},
+ untyped: &Untyped{value: 14},
+ },
+ },
+ },
+ "test_counter_no_meta_metric_2_total": {
+ name: "test_counter_no_meta_metric_2_total",
+ typ: model.MetricTypeUnknown,
+ metrics: []Metric{
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value1"}},
+ untyped: &Untyped{value: 11},
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value2"}},
+ untyped: &Untyped{value: 12},
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value3"}},
+ untyped: &Untyped{value: 13},
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value4"}},
+ untyped: &Untyped{value: 14},
+ },
+ },
+ },
+ },
+ },
+ "Summary no meta parsed as Summary": {
+ input: dataSummaryNoMeta,
+ want: MetricFamilies{
+ "test_summary_no_meta_1_duration_microseconds": {
+ name: "test_summary_no_meta_1_duration_microseconds",
+ typ: model.MetricTypeSummary,
+ metrics: []Metric{
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value1"}},
+ summary: &Summary{
+ sum: 283201.29,
+ count: 31,
+ quantiles: []Quantile{
+ {quantile: 0.5, value: 4931.921},
+ {quantile: 0.9, value: 4932.921},
+ {quantile: 0.99, value: 4933.921},
+ },
+ },
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value2"}},
+ summary: &Summary{
+ sum: 283201.29,
+ count: 31,
+ quantiles: []Quantile{
+ {quantile: 0.5, value: 4931.921},
+ {quantile: 0.9, value: 4932.921},
+ {quantile: 0.99, value: 4933.921},
+ },
+ },
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value3"}},
+ summary: &Summary{
+ sum: 283201.29,
+ count: 31,
+ quantiles: []Quantile{
+ {quantile: 0.5, value: 4931.921},
+ {quantile: 0.9, value: 4932.921},
+ {quantile: 0.99, value: 4933.921},
+ },
+ },
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value4"}},
+ summary: &Summary{
+ sum: 283201.29,
+ count: 31,
+ quantiles: []Quantile{
+ {quantile: 0.5, value: 4931.921},
+ {quantile: 0.9, value: 4932.921},
+ {quantile: 0.99, value: 4933.921},
+ },
+ },
+ },
+ },
+ },
+ "test_summary_no_meta_2_duration_microseconds": {
+ name: "test_summary_no_meta_2_duration_microseconds",
+ typ: model.MetricTypeSummary,
+ metrics: []Metric{
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value1"}},
+ summary: &Summary{
+ sum: 383201.29,
+ count: 41,
+ quantiles: []Quantile{
+ {quantile: 0.5, value: 5931.921},
+ {quantile: 0.9, value: 5932.921},
+ {quantile: 0.99, value: 5933.921},
+ },
+ },
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value2"}},
+ summary: &Summary{
+ sum: 383201.29,
+ count: 41,
+ quantiles: []Quantile{
+ {quantile: 0.5, value: 5931.921},
+ {quantile: 0.9, value: 5932.921},
+ {quantile: 0.99, value: 5933.921},
+ },
+ },
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value3"}},
+ summary: &Summary{
+ sum: 383201.29,
+ count: 41,
+ quantiles: []Quantile{
+ {quantile: 0.5, value: 5931.921},
+ {quantile: 0.9, value: 5932.921},
+ {quantile: 0.99, value: 5933.921},
+ },
+ },
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value4"}},
+ summary: &Summary{
+ sum: 383201.29,
+ count: 41,
+ quantiles: []Quantile{
+ {quantile: 0.5, value: 5931.921},
+ {quantile: 0.9, value: 5932.921},
+ {quantile: 0.99, value: 5933.921},
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ "Histogram no meta parsed as Histogram": {
+ input: dataHistogramNoMeta,
+ want: MetricFamilies{
+ "test_histogram_no_meta_1_duration_seconds": {
+ name: "test_histogram_no_meta_1_duration_seconds",
+ typ: model.MetricTypeHistogram,
+ metrics: []Metric{
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value1"}},
+ histogram: &Histogram{
+ sum: 0.00147889,
+ count: 6,
+ buckets: []Bucket{
+ {upperBound: 0.1, cumulativeCount: 4},
+ {upperBound: 0.5, cumulativeCount: 5},
+ {upperBound: math.Inf(1), cumulativeCount: 6},
+ },
+ },
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value2"}},
+ histogram: &Histogram{
+ sum: 0.00147889,
+ count: 6,
+ buckets: []Bucket{
+ {upperBound: 0.1, cumulativeCount: 4},
+ {upperBound: 0.5, cumulativeCount: 5},
+ {upperBound: math.Inf(1), cumulativeCount: 6},
+ },
+ },
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value3"}},
+ histogram: &Histogram{
+ sum: 0.00147889,
+ count: 6,
+ buckets: []Bucket{
+ {upperBound: 0.1, cumulativeCount: 4},
+ {upperBound: 0.5, cumulativeCount: 5},
+ {upperBound: math.Inf(1), cumulativeCount: 6},
+ },
+ },
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value4"}},
+ histogram: &Histogram{
+ sum: 0.00147889,
+ count: 6,
+ buckets: []Bucket{
+ {upperBound: 0.1, cumulativeCount: 4},
+ {upperBound: 0.5, cumulativeCount: 5},
+ {upperBound: math.Inf(1), cumulativeCount: 6},
+ },
+ },
+ },
+ },
+ },
+ "test_histogram_no_meta_2_duration_seconds": {
+ name: "test_histogram_no_meta_2_duration_seconds",
+ typ: model.MetricTypeHistogram,
+ metrics: []Metric{
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value1"}},
+ histogram: &Histogram{
+ sum: 0.00247889,
+ count: 9,
+ buckets: []Bucket{
+ {upperBound: 0.1, cumulativeCount: 7},
+ {upperBound: 0.5, cumulativeCount: 8},
+ {upperBound: math.Inf(1), cumulativeCount: 9},
+ },
+ },
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value2"}},
+ histogram: &Histogram{
+ sum: 0.00247889,
+ count: 9,
+ buckets: []Bucket{
+ {upperBound: 0.1, cumulativeCount: 7},
+ {upperBound: 0.5, cumulativeCount: 8},
+ {upperBound: math.Inf(1), cumulativeCount: 9},
+ },
+ },
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value3"}},
+ histogram: &Histogram{
+ sum: 0.00247889,
+ count: 9,
+ buckets: []Bucket{
+ {upperBound: 0.1, cumulativeCount: 7},
+ {upperBound: 0.5, cumulativeCount: 8},
+ {upperBound: math.Inf(1), cumulativeCount: 9},
+ },
+ },
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value4"}},
+ histogram: &Histogram{
+ sum: 0.00247889,
+ count: 9,
+ buckets: []Bucket{
+ {upperBound: 0.1, cumulativeCount: 7},
+ {upperBound: 0.5, cumulativeCount: 8},
+ {upperBound: math.Inf(1), cumulativeCount: 9},
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ "All types": {
+ input: dataAllTypes,
+ want: MetricFamilies{
+ "test_gauge_metric_1": {
+ name: "test_gauge_metric_1",
+ help: "Test Gauge Metric 1",
+ typ: model.MetricTypeGauge,
+ metrics: []Metric{
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value1"}},
+ gauge: &Gauge{value: 11},
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value2"}},
+ gauge: &Gauge{value: 12},
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value3"}},
+ gauge: &Gauge{value: 13},
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value4"}},
+ gauge: &Gauge{value: 14},
+ },
+ },
+ },
+ "test_gauge_metric_2": {
+ name: "test_gauge_metric_2",
+ typ: model.MetricTypeGauge,
+ metrics: []Metric{
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value1"}},
+ gauge: &Gauge{value: 11},
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value2"}},
+ gauge: &Gauge{value: 12},
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value3"}},
+ gauge: &Gauge{value: 13},
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value4"}},
+ gauge: &Gauge{value: 14},
+ },
+ },
+ },
+ "test_counter_metric_1_total": {
+ name: "test_counter_metric_1_total",
+ help: "Test Counter Metric 1",
+ typ: model.MetricTypeCounter,
+ metrics: []Metric{
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value1"}},
+ counter: &Counter{value: 11},
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value2"}},
+ counter: &Counter{value: 12},
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value3"}},
+ counter: &Counter{value: 13},
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value4"}},
+ counter: &Counter{value: 14},
+ },
+ },
+ },
+ "test_counter_metric_2_total": {
+ name: "test_counter_metric_2_total",
+ typ: model.MetricTypeCounter,
+ metrics: []Metric{
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value1"}},
+ counter: &Counter{value: 11},
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value2"}},
+ counter: &Counter{value: 12},
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value3"}},
+ counter: &Counter{value: 13},
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value4"}},
+ counter: &Counter{value: 14},
+ },
+ },
+ },
+ "test_summary_1_duration_microseconds": {
+ name: "test_summary_1_duration_microseconds",
+ help: "Test Summary Metric 1",
+ typ: model.MetricTypeSummary,
+ metrics: []Metric{
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value1"}},
+ summary: &Summary{
+ sum: 283201.29,
+ count: 31,
+ quantiles: []Quantile{
+ {quantile: 0.5, value: 4931.921},
+ {quantile: 0.9, value: 4932.921},
+ {quantile: 0.99, value: 4933.921},
+ },
+ },
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value2"}},
+ summary: &Summary{
+ sum: 283201.29,
+ count: 31,
+ quantiles: []Quantile{
+ {quantile: 0.5, value: 4931.921},
+ {quantile: 0.9, value: 4932.921},
+ {quantile: 0.99, value: 4933.921},
+ },
+ },
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value3"}},
+ summary: &Summary{
+ sum: 283201.29,
+ count: 31,
+ quantiles: []Quantile{
+ {quantile: 0.5, value: 4931.921},
+ {quantile: 0.9, value: 4932.921},
+ {quantile: 0.99, value: 4933.921},
+ },
+ },
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value4"}},
+ summary: &Summary{
+ sum: 283201.29,
+ count: 31,
+ quantiles: []Quantile{
+ {quantile: 0.5, value: 4931.921},
+ {quantile: 0.9, value: 4932.921},
+ {quantile: 0.99, value: 4933.921},
+ },
+ },
+ },
+ },
+ },
+ "test_summary_2_duration_microseconds": {
+ name: "test_summary_2_duration_microseconds",
+ typ: model.MetricTypeSummary,
+ metrics: []Metric{
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value1"}},
+ summary: &Summary{
+ sum: 383201.29,
+ count: 41,
+ quantiles: []Quantile{
+ {quantile: 0.5, value: 5931.921},
+ {quantile: 0.9, value: 5932.921},
+ {quantile: 0.99, value: 5933.921},
+ },
+ },
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value2"}},
+ summary: &Summary{
+ sum: 383201.29,
+ count: 41,
+ quantiles: []Quantile{
+ {quantile: 0.5, value: 5931.921},
+ {quantile: 0.9, value: 5932.921},
+ {quantile: 0.99, value: 5933.921},
+ },
+ },
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value3"}},
+ summary: &Summary{
+ sum: 383201.29,
+ count: 41,
+ quantiles: []Quantile{
+ {quantile: 0.5, value: 5931.921},
+ {quantile: 0.9, value: 5932.921},
+ {quantile: 0.99, value: 5933.921},
+ },
+ },
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value4"}},
+ summary: &Summary{
+ sum: 383201.29,
+ count: 41,
+ quantiles: []Quantile{
+ {quantile: 0.5, value: 5931.921},
+ {quantile: 0.9, value: 5932.921},
+ {quantile: 0.99, value: 5933.921},
+ },
+ },
+ },
+ },
+ },
+ "test_histogram_1_duration_seconds": {
+ name: "test_histogram_1_duration_seconds",
+ help: "Test Histogram Metric 1",
+ typ: model.MetricTypeHistogram,
+ metrics: []Metric{
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value1"}},
+ histogram: &Histogram{
+ sum: 0.00147889,
+ count: 6,
+ buckets: []Bucket{
+ {upperBound: 0.1, cumulativeCount: 4},
+ {upperBound: 0.5, cumulativeCount: 5},
+ {upperBound: math.Inf(1), cumulativeCount: 6},
+ },
+ },
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value2"}},
+ histogram: &Histogram{
+ sum: 0.00147889,
+ count: 6,
+ buckets: []Bucket{
+ {upperBound: 0.1, cumulativeCount: 4},
+ {upperBound: 0.5, cumulativeCount: 5},
+ {upperBound: math.Inf(1), cumulativeCount: 6},
+ },
+ },
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value3"}},
+ histogram: &Histogram{
+ sum: 0.00147889,
+ count: 6,
+ buckets: []Bucket{
+ {upperBound: 0.1, cumulativeCount: 4},
+ {upperBound: 0.5, cumulativeCount: 5},
+ {upperBound: math.Inf(1), cumulativeCount: 6},
+ },
+ },
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value4"}},
+ histogram: &Histogram{
+ sum: 0.00147889,
+ count: 6,
+ buckets: []Bucket{
+ {upperBound: 0.1, cumulativeCount: 4},
+ {upperBound: 0.5, cumulativeCount: 5},
+ {upperBound: math.Inf(1), cumulativeCount: 6},
+ },
+ },
+ },
+ },
+ },
+ "test_histogram_2_duration_seconds": {
+ name: "test_histogram_2_duration_seconds",
+ typ: model.MetricTypeHistogram,
+ metrics: []Metric{
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value1"}},
+ histogram: &Histogram{
+ sum: 0.00247889,
+ count: 9,
+ buckets: []Bucket{
+ {upperBound: 0.1, cumulativeCount: 7},
+ {upperBound: 0.5, cumulativeCount: 8},
+ {upperBound: math.Inf(1), cumulativeCount: 9},
+ },
+ },
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value2"}},
+ histogram: &Histogram{
+ sum: 0.00247889,
+ count: 9,
+ buckets: []Bucket{
+ {upperBound: 0.1, cumulativeCount: 7},
+ {upperBound: 0.5, cumulativeCount: 8},
+ {upperBound: math.Inf(1), cumulativeCount: 9},
+ },
+ },
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value3"}},
+ histogram: &Histogram{
+ sum: 0.00247889,
+ count: 9,
+ buckets: []Bucket{
+ {upperBound: 0.1, cumulativeCount: 7},
+ {upperBound: 0.5, cumulativeCount: 8},
+ {upperBound: math.Inf(1), cumulativeCount: 9},
+ },
+ },
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value4"}},
+ histogram: &Histogram{
+ sum: 0.00247889,
+ count: 9,
+ buckets: []Bucket{
+ {upperBound: 0.1, cumulativeCount: 7},
+ {upperBound: 0.5, cumulativeCount: 8},
+ {upperBound: math.Inf(1), cumulativeCount: 9},
+ },
+ },
+ },
+ },
+ },
+ "test_gauge_no_meta_metric_1": {
+ name: "test_gauge_no_meta_metric_1",
+ typ: model.MetricTypeUnknown,
+ metrics: []Metric{
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value1"}},
+ untyped: &Untyped{value: 11},
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value2"}},
+ untyped: &Untyped{value: 12},
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value3"}},
+ untyped: &Untyped{value: 13},
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value4"}},
+ untyped: &Untyped{value: 14},
+ },
+ },
+ },
+ "test_gauge_no_meta_metric_2": {
+ name: "test_gauge_no_meta_metric_2",
+ typ: model.MetricTypeUnknown,
+ metrics: []Metric{
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value1"}},
+ untyped: &Untyped{value: 11},
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value2"}},
+ untyped: &Untyped{value: 12},
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value3"}},
+ untyped: &Untyped{value: 13},
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value4"}},
+ untyped: &Untyped{value: 14},
+ },
+ },
+ },
+ "test_counter_no_meta_metric_1_total": {
+ name: "test_counter_no_meta_metric_1_total",
+ typ: model.MetricTypeUnknown,
+ metrics: []Metric{
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value1"}},
+ untyped: &Untyped{value: 11},
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value2"}},
+ untyped: &Untyped{value: 12},
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value3"}},
+ untyped: &Untyped{value: 13},
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value4"}},
+ untyped: &Untyped{value: 14},
+ },
+ },
+ },
+ "test_counter_no_meta_metric_2_total": {
+ name: "test_counter_no_meta_metric_2_total",
+ typ: model.MetricTypeUnknown,
+ metrics: []Metric{
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value1"}},
+ untyped: &Untyped{value: 11},
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value2"}},
+ untyped: &Untyped{value: 12},
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value3"}},
+ untyped: &Untyped{value: 13},
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value4"}},
+ untyped: &Untyped{value: 14},
+ },
+ },
+ },
+ "test_summary_no_meta_1_duration_microseconds": {
+ name: "test_summary_no_meta_1_duration_microseconds",
+ typ: model.MetricTypeSummary,
+ metrics: []Metric{
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value1"}},
+ summary: &Summary{
+ sum: 283201.29,
+ count: 31,
+ quantiles: []Quantile{
+ {quantile: 0.5, value: 4931.921},
+ {quantile: 0.9, value: 4932.921},
+ {quantile: 0.99, value: 4933.921},
+ },
+ },
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value2"}},
+ summary: &Summary{
+ sum: 283201.29,
+ count: 31,
+ quantiles: []Quantile{
+ {quantile: 0.5, value: 4931.921},
+ {quantile: 0.9, value: 4932.921},
+ {quantile: 0.99, value: 4933.921},
+ },
+ },
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value3"}},
+ summary: &Summary{
+ sum: 283201.29,
+ count: 31,
+ quantiles: []Quantile{
+ {quantile: 0.5, value: 4931.921},
+ {quantile: 0.9, value: 4932.921},
+ {quantile: 0.99, value: 4933.921},
+ },
+ },
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value4"}},
+ summary: &Summary{
+ sum: 283201.29,
+ count: 31,
+ quantiles: []Quantile{
+ {quantile: 0.5, value: 4931.921},
+ {quantile: 0.9, value: 4932.921},
+ {quantile: 0.99, value: 4933.921},
+ },
+ },
+ },
+ },
+ },
+ "test_summary_no_meta_2_duration_microseconds": {
+ name: "test_summary_no_meta_2_duration_microseconds",
+ typ: model.MetricTypeSummary,
+ metrics: []Metric{
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value1"}},
+ summary: &Summary{
+ sum: 383201.29,
+ count: 41,
+ quantiles: []Quantile{
+ {quantile: 0.5, value: 5931.921},
+ {quantile: 0.9, value: 5932.921},
+ {quantile: 0.99, value: 5933.921},
+ },
+ },
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value2"}},
+ summary: &Summary{
+ sum: 383201.29,
+ count: 41,
+ quantiles: []Quantile{
+ {quantile: 0.5, value: 5931.921},
+ {quantile: 0.9, value: 5932.921},
+ {quantile: 0.99, value: 5933.921},
+ },
+ },
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value3"}},
+ summary: &Summary{
+ sum: 383201.29,
+ count: 41,
+ quantiles: []Quantile{
+ {quantile: 0.5, value: 5931.921},
+ {quantile: 0.9, value: 5932.921},
+ {quantile: 0.99, value: 5933.921},
+ },
+ },
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value4"}},
+ summary: &Summary{
+ sum: 383201.29,
+ count: 41,
+ quantiles: []Quantile{
+ {quantile: 0.5, value: 5931.921},
+ {quantile: 0.9, value: 5932.921},
+ {quantile: 0.99, value: 5933.921},
+ },
+ },
+ },
+ },
+ },
+ "test_histogram_no_meta_1_duration_seconds": {
+ name: "test_histogram_no_meta_1_duration_seconds",
+ typ: model.MetricTypeHistogram,
+ metrics: []Metric{
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value1"}},
+ histogram: &Histogram{
+ sum: 0.00147889,
+ count: 6,
+ buckets: []Bucket{
+ {upperBound: 0.1, cumulativeCount: 4},
+ {upperBound: 0.5, cumulativeCount: 5},
+ {upperBound: math.Inf(1), cumulativeCount: 6},
+ },
+ },
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value2"}},
+ histogram: &Histogram{
+ sum: 0.00147889,
+ count: 6,
+ buckets: []Bucket{
+ {upperBound: 0.1, cumulativeCount: 4},
+ {upperBound: 0.5, cumulativeCount: 5},
+ {upperBound: math.Inf(1), cumulativeCount: 6},
+ },
+ },
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value3"}},
+ histogram: &Histogram{
+ sum: 0.00147889,
+ count: 6,
+ buckets: []Bucket{
+ {upperBound: 0.1, cumulativeCount: 4},
+ {upperBound: 0.5, cumulativeCount: 5},
+ {upperBound: math.Inf(1), cumulativeCount: 6},
+ },
+ },
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value4"}},
+ histogram: &Histogram{
+ sum: 0.00147889,
+ count: 6,
+ buckets: []Bucket{
+ {upperBound: 0.1, cumulativeCount: 4},
+ {upperBound: 0.5, cumulativeCount: 5},
+ {upperBound: math.Inf(1), cumulativeCount: 6},
+ },
+ },
+ },
+ },
+ },
+ "test_histogram_no_meta_2_duration_seconds": {
+ name: "test_histogram_no_meta_2_duration_seconds",
+ typ: model.MetricTypeHistogram,
+ metrics: []Metric{
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value1"}},
+ histogram: &Histogram{
+ sum: 0.00247889,
+ count: 9,
+ buckets: []Bucket{
+ {upperBound: 0.1, cumulativeCount: 7},
+ {upperBound: 0.5, cumulativeCount: 8},
+ {upperBound: math.Inf(1), cumulativeCount: 9},
+ },
+ },
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value2"}},
+ histogram: &Histogram{
+ sum: 0.00247889,
+ count: 9,
+ buckets: []Bucket{
+ {upperBound: 0.1, cumulativeCount: 7},
+ {upperBound: 0.5, cumulativeCount: 8},
+ {upperBound: math.Inf(1), cumulativeCount: 9},
+ },
+ },
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value3"}},
+ histogram: &Histogram{
+ sum: 0.00247889,
+ count: 9,
+ buckets: []Bucket{
+ {upperBound: 0.1, cumulativeCount: 7},
+ {upperBound: 0.5, cumulativeCount: 8},
+ {upperBound: math.Inf(1), cumulativeCount: 9},
+ },
+ },
+ },
+ {
+ labels: labels.Labels{{Name: "label1", Value: "value4"}},
+ histogram: &Histogram{
+ sum: 0.00247889,
+ count: 9,
+ buckets: []Bucket{
+ {upperBound: 0.1, cumulativeCount: 7},
+ {upperBound: 0.5, cumulativeCount: 8},
+ {upperBound: math.Inf(1), cumulativeCount: 9},
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ var p promTextParser
+
+ for i := 0; i < 10; i++ {
+ t.Run(fmt.Sprintf("parse num %d", i+1), func(t *testing.T) {
+ mfs, err := p.parseToMetricFamilies(test.input)
+ if len(test.want) > 0 {
+ assert.Equal(t, test.want, mfs)
+ } else {
+ assert.Error(t, err)
+ }
+ })
+ }
+ })
+ }
+}
+
+func TestPromTextParser_parseToMetricFamiliesWithSelector(t *testing.T) {
+ sr, err := selector.Parse(`test_gauge_metric_1{label1="value2"}`)
+ require.NoError(t, err)
+
+ p := promTextParser{sr: sr}
+
+ txt := []byte(`
+test_gauge_metric_1{label1="value1"} 1
+test_gauge_metric_1{label1="value2"} 1
+test_gauge_metric_2{label1="value1"} 1
+test_gauge_metric_2{label1="value2"} 1
+`)
+
+ want := MetricFamilies{
+ "test_gauge_metric_1": &MetricFamily{
+ name: "test_gauge_metric_1",
+ typ: model.MetricTypeUnknown,
+ metrics: []Metric{
+ {labels: labels.Labels{{Name: "label1", Value: "value2"}}, untyped: &Untyped{value: 1}},
+ },
+ },
+ }
+
+ mfs, err := p.parseToMetricFamilies(txt)
+
+ require.NoError(t, err)
+ assert.Equal(t, want, mfs)
+}
+
+func TestPromTextParser_parseToSeries(t *testing.T) {
+ tests := map[string]struct {
+ input []byte
+ want Series
+ }{
+ "All types": {
+ input: []byte(`
+# HELP test_gauge_metric_1 Test Gauge Metric 1
+# TYPE test_gauge_metric_1 gauge
+test_gauge_metric_1{label1="value1"} 11
+test_gauge_no_meta_metric_1{label1="value1"} 11
+# HELP test_counter_metric_1_total Test Counter Metric 1
+# TYPE test_counter_metric_1_total counter
+test_counter_metric_1_total{label1="value1"} 11
+test_counter_no_meta_metric_1_total{label1="value1"} 11
+# HELP test_summary_1_duration_microseconds Test Summary Metric 1
+# TYPE test_summary_1_duration_microseconds summary
+test_summary_1_duration_microseconds{label1="value1",quantile="0.5"} 4931.921
+test_summary_1_duration_microseconds{label1="value1",quantile="0.9"} 4932.921
+test_summary_1_duration_microseconds{label1="value1",quantile="0.99"} 4933.921
+test_summary_1_duration_microseconds_sum{label1="value1"} 283201.29
+test_summary_1_duration_microseconds_count{label1="value1"} 31
+test_summary_no_meta_1_duration_microseconds{label1="value1",quantile="0.5"} 4931.921
+test_summary_no_meta_1_duration_microseconds{label1="value1",quantile="0.9"} 4932.921
+test_summary_no_meta_1_duration_microseconds{label1="value1",quantile="0.99"} 4933.921
+test_summary_no_meta_1_duration_microseconds_sum{label1="value1"} 283201.29
+test_summary_no_meta_1_duration_microseconds_count{label1="value1"} 31
+# HELP test_histogram_1_duration_seconds Test Histogram Metric 1
+# TYPE test_histogram_1_duration_seconds histogram
+test_histogram_1_duration_seconds_bucket{label1="value1",le="0.1"} 4
+test_histogram_1_duration_seconds_bucket{label1="value1",le="0.5"} 5
+test_histogram_1_duration_seconds_bucket{label1="value1",le="+Inf"} 6
+test_histogram_1_duration_seconds_sum{label1="value1"} 0.00147889
+test_histogram_1_duration_seconds_count{label1="value1"} 6
+test_histogram_no_meta_1_duration_seconds_bucket{label1="value1",le="0.1"} 4
+test_histogram_no_meta_1_duration_seconds_bucket{label1="value1",le="0.5"} 5
+test_histogram_no_meta_1_duration_seconds_bucket{label1="value1",le="+Inf"} 6
+test_histogram_no_meta_1_duration_seconds_sum{label1="value1"} 0.00147889
+test_histogram_no_meta_1_duration_seconds_count{label1="value1"} 6
+`),
+ want: Series{
+ // Gauge
+ {
+ Labels: labels.Labels{
+ {Name: "__name__", Value: "test_gauge_metric_1"},
+ {Name: "label1", Value: "value1"},
+ },
+ Value: 11,
+ },
+ {
+ Labels: labels.Labels{
+ {Name: "__name__", Value: "test_gauge_no_meta_metric_1"},
+ {Name: "label1", Value: "value1"},
+ },
+ Value: 11,
+ },
+ // Counter
+ {
+ Labels: labels.Labels{
+ {Name: "__name__", Value: "test_counter_metric_1_total"},
+ {Name: "label1", Value: "value1"},
+ },
+ Value: 11,
+ },
+ {
+ Labels: labels.Labels{
+ {Name: "__name__", Value: "test_counter_no_meta_metric_1_total"},
+ {Name: "label1", Value: "value1"},
+ },
+ Value: 11,
+ },
+ //// Summary
+ {
+ Labels: labels.Labels{
+ {Name: "__name__", Value: "test_summary_1_duration_microseconds"},
+ {Name: "label1", Value: "value1"},
+ {Name: "quantile", Value: "0.5"},
+ },
+ Value: 4931.921,
+ },
+ {
+ Labels: labels.Labels{
+ {Name: "__name__", Value: "test_summary_1_duration_microseconds"},
+ {Name: "label1", Value: "value1"},
+ {Name: "quantile", Value: "0.9"},
+ },
+ Value: 4932.921,
+ },
+ {
+ Labels: labels.Labels{
+ {Name: "__name__", Value: "test_summary_1_duration_microseconds"},
+ {Name: "label1", Value: "value1"},
+ {Name: "quantile", Value: "0.99"},
+ },
+ Value: 4933.921,
+ },
+ {
+ Labels: labels.Labels{
+ {Name: "__name__", Value: "test_summary_1_duration_microseconds_sum"},
+ {Name: "label1", Value: "value1"},
+ },
+ Value: 283201.29,
+ },
+ {
+ Labels: labels.Labels{
+ {Name: "__name__", Value: "test_summary_1_duration_microseconds_count"},
+ {Name: "label1", Value: "value1"},
+ },
+ Value: 31,
+ },
+ {
+ Labels: labels.Labels{
+ {Name: "__name__", Value: "test_summary_no_meta_1_duration_microseconds"},
+ {Name: "label1", Value: "value1"},
+ {Name: "quantile", Value: "0.5"},
+ },
+ Value: 4931.921,
+ },
+ {
+ Labels: labels.Labels{
+ {Name: "__name__", Value: "test_summary_no_meta_1_duration_microseconds"},
+ {Name: "label1", Value: "value1"},
+ {Name: "quantile", Value: "0.9"},
+ },
+ Value: 4932.921,
+ },
+ {
+ Labels: labels.Labels{
+ {Name: "__name__", Value: "test_summary_no_meta_1_duration_microseconds"},
+ {Name: "label1", Value: "value1"},
+ {Name: "quantile", Value: "0.99"},
+ },
+ Value: 4933.921,
+ },
+ {
+ Labels: labels.Labels{
+ {Name: "__name__", Value: "test_summary_no_meta_1_duration_microseconds_sum"},
+ {Name: "label1", Value: "value1"},
+ },
+ Value: 283201.29,
+ },
+ {
+ Labels: labels.Labels{
+ {Name: "__name__", Value: "test_summary_no_meta_1_duration_microseconds_count"},
+ {Name: "label1", Value: "value1"},
+ },
+ Value: 31,
+ },
+ // Histogram
+ {
+ Labels: labels.Labels{
+ {Name: "__name__", Value: "test_histogram_1_duration_seconds_bucket"},
+ {Name: "label1", Value: "value1"},
+ {Name: "le", Value: "0.1"},
+ },
+ Value: 4,
+ },
+ {
+ Labels: labels.Labels{
+ {Name: "__name__", Value: "test_histogram_1_duration_seconds_bucket"},
+ {Name: "label1", Value: "value1"},
+ {Name: "le", Value: "0.5"},
+ },
+ Value: 5,
+ },
+ {
+ Labels: labels.Labels{
+ {Name: "__name__", Value: "test_histogram_1_duration_seconds_bucket"},
+ {Name: "label1", Value: "value1"},
+ {Name: "le", Value: "+Inf"},
+ },
+ Value: 6,
+ },
+ {
+ Labels: labels.Labels{
+ {Name: "__name__", Value: "test_histogram_1_duration_seconds_sum"},
+ {Name: "label1", Value: "value1"},
+ },
+ Value: 0.00147889,
+ },
+ {
+ Labels: labels.Labels{
+ {Name: "__name__", Value: "test_histogram_1_duration_seconds_count"},
+ {Name: "label1", Value: "value1"},
+ },
+ Value: 6,
+ },
+
+ {
+ Labels: labels.Labels{
+ {Name: "__name__", Value: "test_histogram_no_meta_1_duration_seconds_bucket"},
+ {Name: "label1", Value: "value1"},
+ {Name: "le", Value: "0.1"},
+ },
+ Value: 4,
+ },
+ {
+ Labels: labels.Labels{
+ {Name: "__name__", Value: "test_histogram_no_meta_1_duration_seconds_bucket"},
+ {Name: "label1", Value: "value1"},
+ {Name: "le", Value: "0.5"},
+ },
+ Value: 5,
+ },
+ {
+ Labels: labels.Labels{
+ {Name: "__name__", Value: "test_histogram_no_meta_1_duration_seconds_bucket"},
+ {Name: "label1", Value: "value1"},
+ {Name: "le", Value: "+Inf"},
+ },
+ Value: 6,
+ },
+ {
+ Labels: labels.Labels{
+ {Name: "__name__", Value: "test_histogram_no_meta_1_duration_seconds_sum"},
+ {Name: "label1", Value: "value1"},
+ },
+ Value: 0.00147889,
+ },
+ {
+ Labels: labels.Labels{
+ {Name: "__name__", Value: "test_histogram_no_meta_1_duration_seconds_count"},
+ {Name: "label1", Value: "value1"},
+ },
+ Value: 6,
+ },
+ },
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ var p promTextParser
+
+ for i := 0; i < 10; i++ {
+ t.Run(fmt.Sprintf("parse num %d", i+1), func(t *testing.T) {
+ series, err := p.parseToSeries(test.input)
+
+ if len(test.want) > 0 {
+ test.want.Sort()
+ assert.Equal(t, test.want, series)
+ } else {
+ assert.Error(t, err)
+ }
+ })
+ }
+ })
+ }
+}
+
+func TestPromTextParser_parseToSeriesWithSelector(t *testing.T) {
+ sr, err := selector.Parse(`test_gauge_metric_1{label1="value2"}`)
+ require.NoError(t, err)
+
+ p := promTextParser{sr: sr}
+
+ txt := []byte(`
+test_gauge_metric_1{label1="value1"} 1
+test_gauge_metric_1{label1="value2"} 1
+test_gauge_metric_2{label1="value1"} 1
+test_gauge_metric_2{label1="value2"} 1
+`)
+
+ want := Series{SeriesSample{
+ Labels: labels.Labels{
+ {Name: "__name__", Value: "test_gauge_metric_1"},
+ {Name: "label1", Value: "value2"},
+ },
+ Value: 1,
+ }}
+
+ series, err := p.parseToSeries(txt)
+
+ require.NoError(t, err)
+ assert.Equal(t, want, series)
+}
+
+func joinData(data ...[]byte) []byte {
+ var buf bytes.Buffer
+ for _, v := range data {
+ _, _ = buf.Write(v)
+ _ = buf.WriteByte('\n')
+ }
+ return buf.Bytes()
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/prometheus/selector/README.md b/src/go/collectors/go.d.plugin/pkg/prometheus/selector/README.md
new file mode 100644
index 000000000..33506c742
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/prometheus/selector/README.md
@@ -0,0 +1,102 @@
+<!--
+title: "Time series selector"
+custom_edit_url: "https://github.com/netdata/netdata/blob/master/src/go/collectors/go.d.plugin/pkg/prometheus/selector/README.md"
+sidebar_label: "Time series selector"
+learn_status: "Published"
+learn_rel_path: "Developers/External plugins/go.d.plugin/Helper Packages"
+-->
+
+# Time series selector
+
+Selectors allow selecting and filtering of a set of time series.
+
+## Simple Selector
+
+In the simplest form you need to specify only a metric name.
+
+### Syntax
+
+```cmd
+ <line> ::= <metric_name_pattern>
+ <metric_name_pattern> ::= simple pattern
+```
+
+The metric name pattern syntax is [simple pattern](https://github.com/netdata/netdata/blob/master/src/libnetdata/simple_pattern/README.md).
+
+### Examples
+
+This example selects all time series that have the `go_memstats_alloc_bytes` metric name:
+
+```cmd
+go_memstats_alloc_bytes
+```
+
+This example selects all time series with metric names starting with `go_memstats_`:
+
+```cmd
+go_memstats_*
+```
+
+This example selects all time series with metric names starting with `go_`, except `go_memstats_`:
+
+```cmd
+!go_memstats_* go_*
+```
+
+## Advanced Selector
+
+It is possible to filter these time series further by appending a comma-separated list of label matchers in curly braces (`{}`).
+
+### Syntax
+
+```cmd
+ <line> ::= [ <metric_name_pattern> ]{ <list_of_selectors> }
+ <metric_name_pattern> ::= simple pattern
+ <list_of_selectors> ::= a comma-separated list of <label_name><op><label_value_pattern>
+ <label_name> ::= an exact label name
+ <op> ::= [ '=', '!=', '=~', '!~', '=*', '!*' ]
+ <label_value_pattern> ::= a label value pattern, depends on <op>
+```
+
+The metric name pattern syntax is [simple pattern](https://github.com/netdata/netdata/blob/master/src/libnetdata/simple_pattern/README.md).
+
+Label matching operators:
+
+- `=`: Match labels that are exactly equal to the provided string.
+- `!=`: Match labels that are not equal to the provided string.
+- `=~`: Match labels that [regex-match](https://golang.org/pkg/regexp/syntax/) the provided string.
+- `!~`: Match labels that do not [regex-match](https://golang.org/pkg/regexp/syntax/) the provided string.
+- `=*`: Match labels that [simple-pattern-match](https://github.com/netdata/netdata/blob/master/src/libnetdata/simple_pattern/README.md) the provided string.
+- `!*`: Match labels that do not [simple-pattern-match](https://github.com/netdata/netdata/blob/master/src/libnetdata/simple_pattern/README.md) the provided string.
+
+### Examples
+
+This example selects all time series that:
+
+- have the `node_cooling_device_cur_state` metric name and
+- label `type` value not equal to `Fan`:
+
+```cmd
+node_cooling_device_cur_state{type!="Fan"}
+```
+
+This example selects all time series that:
+
+- have the `node_filesystem_size_bytes` metric name and
+- label `device` value is either `/dev/nvme0n1p1` or `/dev/nvme0n1p2` and
+- label `fstype` is equal to `ext4`
+
+```cmd
+node_filesystem_size_bytes{device=~"/dev/nvme0n1p1$|/dev/nvme0n1p2$",fstype="ext4"}
+```
+
+Label matchers can also be applied to metric names by matching against the internal `__name__` label.
+
+For example, the expression `node_filesystem_size_bytes` is equivalent to `{__name__="node_filesystem_size_bytes"}`.
+This allows using all operators (other than `=*`) for metric names matching.
+
+The following expression selects all metrics that have a name starting with `node_`:
+
+```cmd
+{__name__=*"node_*"}
+```
diff --git a/src/go/collectors/go.d.plugin/pkg/prometheus/selector/expr.go b/src/go/collectors/go.d.plugin/pkg/prometheus/selector/expr.go
new file mode 100644
index 000000000..7593513a5
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/prometheus/selector/expr.go
@@ -0,0 +1,62 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package selector
+
+import "fmt"
+
+type Expr struct {
+ Allow []string `yaml:"allow" json:"allow"`
+ Deny []string `yaml:"deny" json:"deny"`
+}
+
+func (e Expr) Empty() bool {
+ return len(e.Allow) == 0 && len(e.Deny) == 0
+
+}
+
+func (e Expr) Parse() (Selector, error) {
+ if e.Empty() {
+ return nil, nil
+ }
+
+ var srs []Selector
+ var allow Selector
+ var deny Selector
+
+ for _, item := range e.Allow {
+ sr, err := Parse(item)
+ if err != nil {
+ return nil, fmt.Errorf("parse selector '%s': %v", item, err)
+ }
+ srs = append(srs, sr)
+ }
+
+ switch len(srs) {
+ case 0:
+ allow = trueSelector{}
+ case 1:
+ allow = srs[0]
+ default:
+ allow = Or(srs[0], srs[1], srs[2:]...)
+ }
+
+ srs = srs[:0]
+ for _, item := range e.Deny {
+ sr, err := Parse(item)
+ if err != nil {
+ return nil, fmt.Errorf("parse selector '%s': %v", item, err)
+ }
+ srs = append(srs, sr)
+ }
+
+ switch len(srs) {
+ case 0:
+ deny = falseSelector{}
+ case 1:
+ deny = srs[0]
+ default:
+ deny = Or(srs[0], srs[1], srs[2:]...)
+ }
+
+ return And(allow, Not(deny)), nil
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/prometheus/selector/expr_test.go b/src/go/collectors/go.d.plugin/pkg/prometheus/selector/expr_test.go
new file mode 100644
index 000000000..598cef9b8
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/prometheus/selector/expr_test.go
@@ -0,0 +1,231 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package selector
+
+import (
+ "testing"
+
+ "github.com/prometheus/prometheus/model/labels"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestExpr_Empty(t *testing.T) {
+ tests := map[string]struct {
+ expr Expr
+ expected bool
+ }{
+ "empty: both allow and deny": {
+ expr: Expr{
+ Allow: []string{},
+ Deny: []string{},
+ },
+ expected: true,
+ },
+ "nil: both allow and deny": {
+ expected: true,
+ },
+ "nil, empty: allow, deny": {
+ expr: Expr{
+ Deny: []string{""},
+ },
+ expected: false,
+ },
+ "empty, nil: allow, deny": {
+ expr: Expr{
+ Allow: []string{""},
+ },
+ expected: false,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ if test.expected {
+ assert.True(t, test.expr.Empty())
+ } else {
+ assert.False(t, test.expr.Empty())
+ }
+ })
+ }
+}
+
+func TestExpr_Parse(t *testing.T) {
+ tests := map[string]struct {
+ expr Expr
+ expectedSr Selector
+ expectedErr bool
+ }{
+ "not set: both allow and deny": {
+ expr: Expr{},
+ },
+ "set: both allow and deny": {
+ expr: Expr{
+ Allow: []string{
+ "go_memstats_*",
+ "node_*",
+ },
+ Deny: []string{
+ "go_memstats_frees_total",
+ "node_cooling_*",
+ },
+ },
+ expectedSr: andSelector{
+ lhs: orSelector{
+ lhs: mustSPName("go_memstats_*"),
+ rhs: mustSPName("node_*"),
+ },
+ rhs: Not(orSelector{
+ lhs: mustSPName("go_memstats_frees_total"),
+ rhs: mustSPName("node_cooling_*"),
+ }),
+ },
+ },
+ "set: only includes": {
+ expr: Expr{
+ Allow: []string{
+ "go_memstats_*",
+ "node_*",
+ },
+ },
+ expectedSr: andSelector{
+ lhs: orSelector{
+ lhs: mustSPName("go_memstats_*"),
+ rhs: mustSPName("node_*"),
+ },
+ rhs: Not(falseSelector{}),
+ },
+ },
+ "set: only excludes": {
+ expr: Expr{
+ Deny: []string{
+ "go_memstats_frees_total",
+ "node_cooling_*",
+ },
+ },
+ expectedSr: andSelector{
+ lhs: trueSelector{},
+ rhs: Not(orSelector{
+ lhs: mustSPName("go_memstats_frees_total"),
+ rhs: mustSPName("node_cooling_*"),
+ }),
+ },
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ m, err := test.expr.Parse()
+
+ if test.expectedErr {
+ assert.Error(t, err)
+ } else {
+ assert.Equal(t, test.expectedSr, m)
+ }
+ })
+ }
+}
+
+func TestExprSelector_Matches(t *testing.T) {
+ tests := map[string]struct {
+ expr Expr
+ lbs labels.Labels
+ expectedMatches bool
+ }{
+ "allow matches: single pattern": {
+ expr: Expr{
+ Allow: []string{"go_*"},
+ },
+ lbs: []labels.Label{{Name: labels.MetricName, Value: "go_memstats_alloc_bytes"}},
+ expectedMatches: true,
+ },
+ "allow matches: several patterns": {
+ expr: Expr{
+ Allow: []string{"node_*", "go_*"},
+ },
+ lbs: []labels.Label{{Name: labels.MetricName, Value: "go_memstats_alloc_bytes"}},
+ expectedMatches: true,
+ },
+ "allow not matches": {
+ expr: Expr{
+ Allow: []string{"node_*"},
+ },
+ lbs: []labels.Label{{Name: labels.MetricName, Value: "go_memstats_alloc_bytes"}},
+ expectedMatches: false,
+ },
+ "deny matches: single pattern": {
+ expr: Expr{
+ Deny: []string{"go_*"},
+ },
+ lbs: []labels.Label{{Name: labels.MetricName, Value: "go_memstats_alloc_bytes"}},
+ expectedMatches: false,
+ },
+ "deny matches: several patterns": {
+ expr: Expr{
+ Deny: []string{"node_*", "go_*"},
+ },
+ lbs: []labels.Label{{Name: labels.MetricName, Value: "go_memstats_alloc_bytes"}},
+ expectedMatches: false,
+ },
+ "deny not matches": {
+ expr: Expr{
+ Deny: []string{"node_*"},
+ },
+ lbs: []labels.Label{{Name: labels.MetricName, Value: "go_memstats_alloc_bytes"}},
+ expectedMatches: true,
+ },
+ "allow and deny matches: single pattern": {
+ expr: Expr{
+ Allow: []string{"go_*"},
+ Deny: []string{"go_*"},
+ },
+ lbs: []labels.Label{{Name: labels.MetricName, Value: "go_memstats_alloc_bytes"}},
+ expectedMatches: false,
+ },
+ "allow and deny matches: several patterns": {
+ expr: Expr{
+ Allow: []string{"node_*", "go_*"},
+ Deny: []string{"node_*", "go_*"},
+ },
+ lbs: []labels.Label{{Name: labels.MetricName, Value: "go_memstats_alloc_bytes"}},
+ expectedMatches: false,
+ },
+ "allow matches and deny not matches": {
+ expr: Expr{
+ Allow: []string{"go_*"},
+ Deny: []string{"node_*"},
+ },
+ lbs: []labels.Label{{Name: labels.MetricName, Value: "go_memstats_alloc_bytes"}},
+ expectedMatches: true,
+ },
+ "allow not matches and deny matches": {
+ expr: Expr{
+ Allow: []string{"node_*"},
+ Deny: []string{"go_*"},
+ },
+ lbs: []labels.Label{{Name: labels.MetricName, Value: "go_memstats_alloc_bytes"}},
+ expectedMatches: false,
+ },
+ "allow not matches and deny not matches": {
+ expr: Expr{
+ Allow: []string{"node_*"},
+ Deny: []string{"node_*"},
+ },
+ lbs: []labels.Label{{Name: labels.MetricName, Value: "go_memstats_alloc_bytes"}},
+ expectedMatches: false,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ sr, err := test.expr.Parse()
+ require.NoError(t, err)
+
+ if test.expectedMatches {
+ assert.True(t, sr.Matches(test.lbs))
+ } else {
+ assert.False(t, sr.Matches(test.lbs))
+ }
+ })
+ }
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/prometheus/selector/logical.go b/src/go/collectors/go.d.plugin/pkg/prometheus/selector/logical.go
new file mode 100644
index 000000000..1556d1715
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/prometheus/selector/logical.go
@@ -0,0 +1,49 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package selector
+
+import (
+ "github.com/prometheus/prometheus/model/labels"
+)
+
+type (
+ trueSelector struct{}
+ falseSelector struct{}
+ negSelector struct{ s Selector }
+ andSelector struct{ lhs, rhs Selector }
+ orSelector struct{ lhs, rhs Selector }
+)
+
+func (trueSelector) Matches(_ labels.Labels) bool { return true }
+func (falseSelector) Matches(_ labels.Labels) bool { return false }
+func (s negSelector) Matches(lbs labels.Labels) bool { return !s.s.Matches(lbs) }
+func (s andSelector) Matches(lbs labels.Labels) bool { return s.lhs.Matches(lbs) && s.rhs.Matches(lbs) }
+func (s orSelector) Matches(lbs labels.Labels) bool { return s.lhs.Matches(lbs) || s.rhs.Matches(lbs) }
+
+// True returns a selector which always returns true
+func True() Selector {
+ return trueSelector{}
+}
+
+// And returns a selector which returns true only if all of its sub-selectors return true
+func And(lhs, rhs Selector, others ...Selector) Selector {
+ s := andSelector{lhs: lhs, rhs: rhs}
+ if len(others) == 0 {
+ return s
+ }
+ return And(s, others[0], others[1:]...)
+}
+
+// Or returns a selector which returns true if any of its sub-selectors return true
+func Or(lhs, rhs Selector, others ...Selector) Selector {
+ s := orSelector{lhs: lhs, rhs: rhs}
+ if len(others) == 0 {
+ return s
+ }
+ return Or(s, others[0], others[1:]...)
+}
+
+// Not returns a selector which returns the negation of the sub-selector's result
+func Not(s Selector) Selector {
+ return negSelector{s}
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/prometheus/selector/logical_test.go b/src/go/collectors/go.d.plugin/pkg/prometheus/selector/logical_test.go
new file mode 100644
index 000000000..239c7f715
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/prometheus/selector/logical_test.go
@@ -0,0 +1,226 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package selector
+
+import (
+ "testing"
+
+ "github.com/prometheus/prometheus/model/labels"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestTrueSelector_Matches(t *testing.T) {
+ tests := map[string]struct {
+ sr trueSelector
+ lbs labels.Labels
+ expected bool
+ }{
+ "not empty labels": {
+ lbs: labels.Labels{{Name: labels.MetricName, Value: "name"}},
+ expected: true,
+ },
+ "empty labels": {
+ expected: true,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ if test.expected {
+ assert.True(t, test.sr.Matches(test.lbs))
+ } else {
+ assert.False(t, test.sr.Matches(test.lbs))
+ }
+ })
+ }
+}
+
+func TestFalseSelector_Matches(t *testing.T) {
+ tests := map[string]struct {
+ sr falseSelector
+ lbs labels.Labels
+ expected bool
+ }{
+ "not empty labels": {
+ lbs: labels.Labels{{Name: labels.MetricName, Value: "name"}},
+ expected: false,
+ },
+ "empty labels": {
+ expected: false,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ if test.expected {
+ assert.True(t, test.sr.Matches(test.lbs))
+ } else {
+ assert.False(t, test.sr.Matches(test.lbs))
+ }
+ })
+ }
+}
+
+func TestNegSelector_Matches(t *testing.T) {
+ tests := map[string]struct {
+ sr negSelector
+ lbs labels.Labels
+ expected bool
+ }{
+ "true matcher": {
+ sr: negSelector{trueSelector{}},
+ lbs: labels.Labels{{Name: labels.MetricName, Value: "name"}},
+ expected: false,
+ },
+ "false matcher": {
+ sr: negSelector{falseSelector{}},
+ lbs: labels.Labels{{Name: labels.MetricName, Value: "name"}},
+ expected: true,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ if test.expected {
+ assert.True(t, test.sr.Matches(test.lbs))
+ } else {
+ assert.False(t, test.sr.Matches(test.lbs))
+ }
+ })
+ }
+}
+
+func TestAndSelector_Matches(t *testing.T) {
+ tests := map[string]struct {
+ sr andSelector
+ lbs labels.Labels
+ expected bool
+ }{
+ "true, true": {
+ sr: andSelector{lhs: trueSelector{}, rhs: trueSelector{}},
+ expected: true,
+ },
+ "true, false": {
+ sr: andSelector{lhs: trueSelector{}, rhs: falseSelector{}},
+ expected: false,
+ },
+ "false, true": {
+ sr: andSelector{lhs: trueSelector{}, rhs: falseSelector{}},
+ expected: false,
+ },
+ "false, false": {
+ sr: andSelector{lhs: falseSelector{}, rhs: falseSelector{}},
+ expected: false,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ assert.Equal(t, test.expected, test.sr.Matches(test.lbs))
+ })
+ }
+}
+
+func TestOrSelector_Matches(t *testing.T) {
+ tests := map[string]struct {
+ sr orSelector
+ lbs labels.Labels
+ expected bool
+ }{
+ "true, true": {
+ sr: orSelector{lhs: trueSelector{}, rhs: trueSelector{}},
+ expected: true,
+ },
+ "true, false": {
+ sr: orSelector{lhs: trueSelector{}, rhs: falseSelector{}},
+ expected: true,
+ },
+ "false, true": {
+ sr: orSelector{lhs: trueSelector{}, rhs: falseSelector{}},
+ expected: true,
+ },
+ "false, false": {
+ sr: orSelector{lhs: falseSelector{}, rhs: falseSelector{}},
+ expected: false,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ assert.Equal(t, test.expected, test.sr.Matches(test.lbs))
+ })
+ }
+}
+
+func Test_And(t *testing.T) {
+ tests := map[string]struct {
+ srs []Selector
+ expected Selector
+ }{
+ "2 selectors": {
+ srs: []Selector{trueSelector{}, trueSelector{}},
+ expected: andSelector{
+ lhs: trueSelector{},
+ rhs: trueSelector{},
+ },
+ },
+ "4 selectors": {
+ srs: []Selector{trueSelector{}, trueSelector{}, trueSelector{}, trueSelector{}},
+ expected: andSelector{
+ lhs: andSelector{
+ lhs: andSelector{
+ lhs: trueSelector{},
+ rhs: trueSelector{},
+ },
+ rhs: trueSelector{},
+ },
+ rhs: trueSelector{}},
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ require.GreaterOrEqual(t, len(test.srs), 2)
+
+ s := And(test.srs[0], test.srs[1], test.srs[2:]...)
+ assert.Equal(t, test.expected, s)
+ })
+ }
+}
+
+func Test_Or(t *testing.T) {
+ tests := map[string]struct {
+ srs []Selector
+ expected Selector
+ }{
+ "2 selectors": {
+ srs: []Selector{trueSelector{}, trueSelector{}},
+ expected: orSelector{
+ lhs: trueSelector{},
+ rhs: trueSelector{},
+ },
+ },
+ "4 selectors": {
+ srs: []Selector{trueSelector{}, trueSelector{}, trueSelector{}, trueSelector{}},
+ expected: orSelector{
+ lhs: orSelector{
+ lhs: orSelector{
+ lhs: trueSelector{},
+ rhs: trueSelector{},
+ },
+ rhs: trueSelector{},
+ },
+ rhs: trueSelector{}},
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ require.GreaterOrEqual(t, len(test.srs), 2)
+
+ s := Or(test.srs[0], test.srs[1], test.srs[2:]...)
+ assert.Equal(t, test.expected, s)
+ })
+ }
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/prometheus/selector/parse.go b/src/go/collectors/go.d.plugin/pkg/prometheus/selector/parse.go
new file mode 100644
index 000000000..29c1d4fbf
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/prometheus/selector/parse.go
@@ -0,0 +1,97 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package selector
+
+import (
+ "fmt"
+ "regexp"
+ "strings"
+
+ "github.com/netdata/netdata/go/go.d.plugin/pkg/matcher"
+)
+
// reLV matches a single selector term of the form name<op>"pattern",
// e.g. label=~"va.+". Capture groups: label_name (alphanumeric/underscore),
// op (one of =~ !~ =* !* = !=; multi-char operators listed first so they
// win over their prefixes), and pattern (the text between the quotes).
var (
	reLV = regexp.MustCompile(`^(?P<label_name>[a-zA-Z0-9_]+)(?P<op>=~|!~|=\*|!\*|=|!=)"(?P<pattern>.+)"$`)
)
+
+func Parse(expr string) (Selector, error) {
+ var srs []Selector
+ lvs := strings.Split(unsugarExpr(expr), ",")
+
+ for _, lv := range lvs {
+ sr, err := parseSelector(lv)
+ if err != nil {
+ return nil, err
+ }
+ srs = append(srs, sr)
+ }
+
+ switch len(srs) {
+ case 0:
+ return nil, nil
+ case 1:
+ return srs[0], nil
+ default:
+ return And(srs[0], srs[1], srs[2:]...), nil
+ }
+}
+
+func parseSelector(line string) (Selector, error) {
+ sub := reLV.FindStringSubmatch(strings.TrimSpace(line))
+ if sub == nil {
+ return nil, fmt.Errorf("invalid selector syntax: '%s'", line)
+ }
+
+ name, op, pattern := sub[1], sub[2], strings.Trim(sub[3], "\"")
+
+ var m matcher.Matcher
+ var err error
+
+ switch op {
+ case OpEqual, OpNegEqual:
+ m, err = matcher.NewStringMatcher(pattern, true, true)
+ case OpRegexp, OpNegRegexp:
+ m, err = matcher.NewRegExpMatcher(pattern)
+ case OpSimplePatterns, OpNegSimplePatterns:
+ m, err = matcher.NewSimplePatternsMatcher(pattern)
+ default:
+ err = fmt.Errorf("unknown matching operator: %s", op)
+ }
+ if err != nil {
+ return nil, err
+ }
+
+ sr := labelSelector{
+ name: name,
+ m: m,
+ }
+
+ if neg := strings.HasPrefix(op, "!"); neg {
+ return Not(sr), nil
+ }
+ return sr, nil
+}
+
+func unsugarExpr(expr string) string {
+ // name => __name__=*"name"
+ // name{label="value"} => __name__=*"name",label="value"
+ // {label="value"} => label="value"
+ expr = strings.TrimSpace(expr)
+
+ switch idx := strings.IndexByte(expr, '{'); true {
+ case idx == -1:
+ expr = fmt.Sprintf(`__name__%s"%s"`,
+ OpSimplePatterns,
+ strings.TrimSpace(expr),
+ )
+ case idx == 0:
+ expr = strings.Trim(expr, "{}")
+ default:
+ expr = fmt.Sprintf(`__name__%s"%s",%s`,
+ OpSimplePatterns,
+ strings.TrimSpace(expr[:idx]),
+ strings.Trim(expr[idx:], "{}"),
+ )
+ }
+ return expr
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/prometheus/selector/parse_test.go b/src/go/collectors/go.d.plugin/pkg/prometheus/selector/parse_test.go
new file mode 100644
index 000000000..ba764e039
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/prometheus/selector/parse_test.go
@@ -0,0 +1,117 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package selector
+
+import (
+ "fmt"
+ "testing"
+
+ "github.com/netdata/netdata/go/go.d.plugin/pkg/matcher"
+
+ "github.com/prometheus/prometheus/model/labels"
+ "github.com/stretchr/testify/assert"
+)
+
+func TestParse(t *testing.T) {
+ tests := map[string]struct {
+ input string
+ expectedSr Selector
+ expectedErr bool
+ }{
+ "sp op: only metric name": {
+ input: "go_memstats_alloc_bytes !go_memstats_* *",
+ expectedSr: mustSPName("go_memstats_alloc_bytes !go_memstats_* *"),
+ },
+ "string op: metric name with labels": {
+ input: fmt.Sprintf(`go_memstats_*{label%s"value"}`, OpEqual),
+ expectedSr: andSelector{
+ lhs: mustSPName("go_memstats_*"),
+ rhs: mustString("label", "value"),
+ },
+ },
+ "neg string op: metric name with labels": {
+ input: fmt.Sprintf(`go_memstats_*{label%s"value"}`, OpNegEqual),
+ expectedSr: andSelector{
+ lhs: mustSPName("go_memstats_*"),
+ rhs: Not(mustString("label", "value")),
+ },
+ },
+ "regexp op: metric name with labels": {
+ input: fmt.Sprintf(`go_memstats_*{label%s"valu.+"}`, OpRegexp),
+ expectedSr: andSelector{
+ lhs: mustSPName("go_memstats_*"),
+ rhs: mustRegexp("label", "valu.+"),
+ },
+ },
+ "neg regexp op: metric name with labels": {
+ input: fmt.Sprintf(`go_memstats_*{label%s"valu.+"}`, OpNegRegexp),
+ expectedSr: andSelector{
+ lhs: mustSPName("go_memstats_*"),
+ rhs: Not(mustRegexp("label", "valu.+")),
+ },
+ },
+ "sp op: metric name with labels": {
+ input: fmt.Sprintf(`go_memstats_*{label%s"valu*"}`, OpSimplePatterns),
+ expectedSr: andSelector{
+ lhs: mustSPName("go_memstats_*"),
+ rhs: mustSP("label", "valu*"),
+ },
+ },
+ "neg sp op: metric name with labels": {
+ input: fmt.Sprintf(`go_memstats_*{label%s"valu*"}`, OpNegSimplePatterns),
+ expectedSr: andSelector{
+ lhs: mustSPName("go_memstats_*"),
+ rhs: Not(mustSP("label", "valu*")),
+ },
+ },
+ "metric name with several labels": {
+ input: fmt.Sprintf(`go_memstats_*{label1%s"value1",label2%s"value2"}`, OpEqual, OpEqual),
+ expectedSr: andSelector{
+ lhs: andSelector{
+ lhs: mustSPName("go_memstats_*"),
+ rhs: mustString("label1", "value1"),
+ },
+ rhs: mustString("label2", "value2"),
+ },
+ },
+ "only labels (unsugar)": {
+ input: fmt.Sprintf(`{__name__%s"go_memstats_*",label1%s"value1",label2%s"value2"}`,
+ OpSimplePatterns, OpEqual, OpEqual),
+ expectedSr: andSelector{
+ lhs: andSelector{
+ lhs: mustSPName("go_memstats_*"),
+ rhs: mustString("label1", "value1"),
+ },
+ rhs: mustString("label2", "value2"),
+ },
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ sr, err := Parse(test.input)
+
+ if test.expectedErr {
+ assert.Error(t, err)
+ } else {
+ assert.Equal(t, test.expectedSr, sr)
+ }
+ })
+ }
+}
+
+func mustSPName(pattern string) Selector {
+ return mustSP(labels.MetricName, pattern)
+}
+
+func mustString(name string, pattern string) Selector {
+ return labelSelector{name: name, m: matcher.Must(matcher.NewStringMatcher(pattern, true, true))}
+}
+
+func mustRegexp(name string, pattern string) Selector {
+ return labelSelector{name: name, m: matcher.Must(matcher.NewRegExpMatcher(pattern))}
+}
+
+func mustSP(name string, pattern string) Selector {
+ return labelSelector{name: name, m: matcher.Must(matcher.NewSimplePatternsMatcher(pattern))}
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/prometheus/selector/selector.go b/src/go/collectors/go.d.plugin/pkg/prometheus/selector/selector.go
new file mode 100644
index 000000000..28203fca1
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/prometheus/selector/selector.go
@@ -0,0 +1,52 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package selector
+
+import (
+ "github.com/netdata/netdata/go/go.d.plugin/pkg/matcher"
+
+ "github.com/prometheus/prometheus/model/labels"
+)
+
// Selector is implemented by all label selectors in this package.
// Matches reports whether the given label set satisfies the selector.
type Selector interface {
	Matches(lbs labels.Labels) bool
}
+
// Matching operators accepted in selector terms (syntax: name<op>"pattern").
// Operators prefixed with '!' negate the match (see parseSelector).
const (
	OpEqual             = "="  // exact string match
	OpNegEqual          = "!=" // negated exact string match
	OpRegexp            = "=~" // regular-expression match
	OpNegRegexp         = "!~" // negated regular-expression match
	OpSimplePatterns    = "=*" // simple-patterns match (matcher.NewSimplePatternsMatcher)
	OpNegSimplePatterns = "!*" // negated simple-patterns match
)
+
// labelSelector matches a single label's value against a matcher.
type labelSelector struct {
	name string          // label name to match; labels.MetricName targets the metric name
	m    matcher.Matcher // matcher applied to that label's value
}
+
+func (s labelSelector) Matches(lbs labels.Labels) bool {
+ if s.name == labels.MetricName {
+ return s.m.MatchString(lbs[0].Value)
+ }
+ if label, ok := lookupLabel(s.name, lbs[1:]); ok {
+ return s.m.MatchString(label.Value)
+ }
+ return false
+}
+
// Func is an adapter that allows an ordinary function to be used as a Selector.
type Func func(lbs labels.Labels) bool

// Matches calls fn(lbs) and returns its result.
func (fn Func) Matches(lbs labels.Labels) bool {
	return fn(lbs)
}
+
+func lookupLabel(name string, lbs labels.Labels) (labels.Label, bool) {
+ for _, label := range lbs {
+ if label.Name == name {
+ return label, true
+ }
+ }
+ return labels.Label{}, false
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/prometheus/selector/selector_test.go b/src/go/collectors/go.d.plugin/pkg/prometheus/selector/selector_test.go
new file mode 100644
index 000000000..aa3110b03
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/prometheus/selector/selector_test.go
@@ -0,0 +1,11 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package selector
+
+import (
+ "testing"
+)
+
// TestLabelMatcher_Matches is an empty placeholder; labelSelector.Matches
// is currently exercised only indirectly via TestParse in parse_test.go.
// TODO(review): add direct cases (metric-name match, regular-label match,
// missing label) here.
func TestLabelMatcher_Matches(t *testing.T) {

}
diff --git a/src/go/collectors/go.d.plugin/pkg/prometheus/testdata/counter-meta.txt b/src/go/collectors/go.d.plugin/pkg/prometheus/testdata/counter-meta.txt
new file mode 100644
index 000000000..53eccda63
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/prometheus/testdata/counter-meta.txt
@@ -0,0 +1,11 @@
+# HELP test_counter_metric_1_total Test Counter Metric 1
+# TYPE test_counter_metric_1_total counter
+test_counter_metric_1_total{label1="value1"} 11
+test_counter_metric_1_total{label1="value2"} 12
+test_counter_metric_1_total{label1="value3"} 13
+test_counter_metric_1_total{label1="value4"} 14
+# TYPE test_counter_metric_2_total counter
+test_counter_metric_2_total{label1="value1"} 11
+test_counter_metric_2_total{label1="value2"} 12
+test_counter_metric_2_total{label1="value3"} 13
+test_counter_metric_2_total{label1="value4"} 14
diff --git a/src/go/collectors/go.d.plugin/pkg/prometheus/testdata/counter-no-meta.txt b/src/go/collectors/go.d.plugin/pkg/prometheus/testdata/counter-no-meta.txt
new file mode 100644
index 000000000..afb11b9b8
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/prometheus/testdata/counter-no-meta.txt
@@ -0,0 +1,8 @@
+test_counter_no_meta_metric_1_total{label1="value1"} 11
+test_counter_no_meta_metric_1_total{label1="value2"} 12
+test_counter_no_meta_metric_1_total{label1="value3"} 13
+test_counter_no_meta_metric_1_total{label1="value4"} 14
+test_counter_no_meta_metric_2_total{label1="value1"} 11
+test_counter_no_meta_metric_2_total{label1="value2"} 12
+test_counter_no_meta_metric_2_total{label1="value3"} 13
+test_counter_no_meta_metric_2_total{label1="value4"} 14
diff --git a/src/go/collectors/go.d.plugin/pkg/prometheus/testdata/gauge-meta.txt b/src/go/collectors/go.d.plugin/pkg/prometheus/testdata/gauge-meta.txt
new file mode 100644
index 000000000..c0773a426
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/prometheus/testdata/gauge-meta.txt
@@ -0,0 +1,11 @@
+# HELP test_gauge_metric_1 Test Gauge Metric 1
+# TYPE test_gauge_metric_1 gauge
+test_gauge_metric_1{label1="value1"} 11
+test_gauge_metric_1{label1="value2"} 12
+test_gauge_metric_1{label1="value3"} 13
+test_gauge_metric_1{label1="value4"} 14
+# TYPE test_gauge_metric_2 gauge
+test_gauge_metric_2{label1="value1"} 11
+test_gauge_metric_2{label1="value2"} 12
+test_gauge_metric_2{label1="value3"} 13
+test_gauge_metric_2{label1="value4"} 14
diff --git a/src/go/collectors/go.d.plugin/pkg/prometheus/testdata/gauge-no-meta.txt b/src/go/collectors/go.d.plugin/pkg/prometheus/testdata/gauge-no-meta.txt
new file mode 100644
index 000000000..e89e0e4d9
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/prometheus/testdata/gauge-no-meta.txt
@@ -0,0 +1,8 @@
+test_gauge_no_meta_metric_1{label1="value1"} 11
+test_gauge_no_meta_metric_1{label1="value2"} 12
+test_gauge_no_meta_metric_1{label1="value3"} 13
+test_gauge_no_meta_metric_1{label1="value4"} 14
+test_gauge_no_meta_metric_2{label1="value1"} 11
+test_gauge_no_meta_metric_2{label1="value2"} 12
+test_gauge_no_meta_metric_2{label1="value3"} 13
+test_gauge_no_meta_metric_2{label1="value4"} 14
diff --git a/src/go/collectors/go.d.plugin/pkg/prometheus/testdata/histogram-meta.txt b/src/go/collectors/go.d.plugin/pkg/prometheus/testdata/histogram-meta.txt
new file mode 100644
index 000000000..9b4b8a965
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/prometheus/testdata/histogram-meta.txt
@@ -0,0 +1,43 @@
+# HELP test_histogram_1_duration_seconds Test Histogram Metric 1
+# TYPE test_histogram_1_duration_seconds histogram
+test_histogram_1_duration_seconds_bucket{label1="value1",le="0.1"} 4
+test_histogram_1_duration_seconds_bucket{label1="value1",le="0.5"} 5
+test_histogram_1_duration_seconds_bucket{label1="value1",le="+Inf"} 6
+test_histogram_1_duration_seconds_sum{label1="value1"} 0.00147889
+test_histogram_1_duration_seconds_count{label1="value1"} 6
+test_histogram_1_duration_seconds_bucket{label1="value2",le="0.1"} 4
+test_histogram_1_duration_seconds_bucket{label1="value2",le="0.5"} 5
+test_histogram_1_duration_seconds_bucket{label1="value2",le="+Inf"} 6
+test_histogram_1_duration_seconds_sum{label1="value2"} 0.00147889
+test_histogram_1_duration_seconds_count{label1="value2"} 6
+test_histogram_1_duration_seconds_bucket{label1="value3",le="0.1"} 4
+test_histogram_1_duration_seconds_bucket{label1="value3",le="0.5"} 5
+test_histogram_1_duration_seconds_bucket{label1="value3",le="+Inf"} 6
+test_histogram_1_duration_seconds_sum{label1="value3"} 0.00147889
+test_histogram_1_duration_seconds_count{label1="value3"} 6
+test_histogram_1_duration_seconds_bucket{label1="value4",le="0.1"} 4
+test_histogram_1_duration_seconds_bucket{label1="value4",le="0.5"} 5
+test_histogram_1_duration_seconds_bucket{label1="value4",le="+Inf"} 6
+test_histogram_1_duration_seconds_sum{label1="value4"} 0.00147889
+test_histogram_1_duration_seconds_count{label1="value4"} 6
+# TYPE test_histogram_2_duration_seconds histogram
+test_histogram_2_duration_seconds_bucket{label1="value1",le="0.1"} 7
+test_histogram_2_duration_seconds_bucket{label1="value1",le="0.5"} 8
+test_histogram_2_duration_seconds_bucket{label1="value1",le="+Inf"} 9
+test_histogram_2_duration_seconds_sum{label1="value1"} 0.00247889
+test_histogram_2_duration_seconds_count{label1="value1"} 9
+test_histogram_2_duration_seconds_bucket{label1="value2",le="0.1"} 7
+test_histogram_2_duration_seconds_bucket{label1="value2",le="0.5"} 8
+test_histogram_2_duration_seconds_bucket{label1="value2",le="+Inf"} 9
+test_histogram_2_duration_seconds_sum{label1="value2"} 0.00247889
+test_histogram_2_duration_seconds_count{label1="value2"} 9
+test_histogram_2_duration_seconds_bucket{label1="value3",le="0.1"} 7
+test_histogram_2_duration_seconds_bucket{label1="value3",le="0.5"} 8
+test_histogram_2_duration_seconds_bucket{label1="value3",le="+Inf"} 9
+test_histogram_2_duration_seconds_sum{label1="value3"} 0.00247889
+test_histogram_2_duration_seconds_count{label1="value3"} 9
+test_histogram_2_duration_seconds_bucket{label1="value4",le="0.1"} 7
+test_histogram_2_duration_seconds_bucket{label1="value4",le="0.5"} 8
+test_histogram_2_duration_seconds_bucket{label1="value4",le="+Inf"} 9
+test_histogram_2_duration_seconds_sum{label1="value4"} 0.00247889
+test_histogram_2_duration_seconds_count{label1="value4"} 9
diff --git a/src/go/collectors/go.d.plugin/pkg/prometheus/testdata/histogram-no-meta.txt b/src/go/collectors/go.d.plugin/pkg/prometheus/testdata/histogram-no-meta.txt
new file mode 100644
index 000000000..49def677c
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/prometheus/testdata/histogram-no-meta.txt
@@ -0,0 +1,40 @@
+test_histogram_no_meta_1_duration_seconds_bucket{label1="value1",le="0.1"} 4
+test_histogram_no_meta_1_duration_seconds_bucket{label1="value1",le="0.5"} 5
+test_histogram_no_meta_1_duration_seconds_bucket{label1="value1",le="+Inf"} 6
+test_histogram_no_meta_1_duration_seconds_sum{label1="value1"} 0.00147889
+test_histogram_no_meta_1_duration_seconds_count{label1="value1"} 6
+test_histogram_no_meta_1_duration_seconds_bucket{label1="value2",le="0.1"} 4
+test_histogram_no_meta_1_duration_seconds_bucket{label1="value2",le="0.5"} 5
+test_histogram_no_meta_1_duration_seconds_bucket{label1="value2",le="+Inf"} 6
+test_histogram_no_meta_1_duration_seconds_sum{label1="value2"} 0.00147889
+test_histogram_no_meta_1_duration_seconds_count{label1="value2"} 6
+test_histogram_no_meta_1_duration_seconds_bucket{label1="value3",le="0.1"} 4
+test_histogram_no_meta_1_duration_seconds_bucket{label1="value3",le="0.5"} 5
+test_histogram_no_meta_1_duration_seconds_bucket{label1="value3",le="+Inf"} 6
+test_histogram_no_meta_1_duration_seconds_sum{label1="value3"} 0.00147889
+test_histogram_no_meta_1_duration_seconds_count{label1="value3"} 6
+test_histogram_no_meta_1_duration_seconds_bucket{label1="value4",le="0.1"} 4
+test_histogram_no_meta_1_duration_seconds_bucket{label1="value4",le="0.5"} 5
+test_histogram_no_meta_1_duration_seconds_bucket{label1="value4",le="+Inf"} 6
+test_histogram_no_meta_1_duration_seconds_sum{label1="value4"} 0.00147889
+test_histogram_no_meta_1_duration_seconds_count{label1="value4"} 6
+test_histogram_no_meta_2_duration_seconds_bucket{label1="value1",le="0.1"} 7
+test_histogram_no_meta_2_duration_seconds_bucket{label1="value1",le="0.5"} 8
+test_histogram_no_meta_2_duration_seconds_bucket{label1="value1",le="+Inf"} 9
+test_histogram_no_meta_2_duration_seconds_sum{label1="value1"} 0.00247889
+test_histogram_no_meta_2_duration_seconds_count{label1="value1"} 9
+test_histogram_no_meta_2_duration_seconds_bucket{label1="value2",le="0.1"} 7
+test_histogram_no_meta_2_duration_seconds_bucket{label1="value2",le="0.5"} 8
+test_histogram_no_meta_2_duration_seconds_bucket{label1="value2",le="+Inf"} 9
+test_histogram_no_meta_2_duration_seconds_sum{label1="value2"} 0.00247889
+test_histogram_no_meta_2_duration_seconds_count{label1="value2"} 9
+test_histogram_no_meta_2_duration_seconds_bucket{label1="value3",le="0.1"} 7
+test_histogram_no_meta_2_duration_seconds_bucket{label1="value3",le="0.5"} 8
+test_histogram_no_meta_2_duration_seconds_bucket{label1="value3",le="+Inf"} 9
+test_histogram_no_meta_2_duration_seconds_sum{label1="value3"} 0.00247889
+test_histogram_no_meta_2_duration_seconds_count{label1="value3"} 9
+test_histogram_no_meta_2_duration_seconds_bucket{label1="value4",le="0.1"} 7
+test_histogram_no_meta_2_duration_seconds_bucket{label1="value4",le="0.5"} 8
+test_histogram_no_meta_2_duration_seconds_bucket{label1="value4",le="+Inf"} 9
+test_histogram_no_meta_2_duration_seconds_sum{label1="value4"} 0.00247889
+test_histogram_no_meta_2_duration_seconds_count{label1="value4"} 9
diff --git a/src/go/collectors/go.d.plugin/pkg/prometheus/testdata/multiline-help.txt b/src/go/collectors/go.d.plugin/pkg/prometheus/testdata/multiline-help.txt
new file mode 100644
index 000000000..f1598fcce
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/prometheus/testdata/multiline-help.txt
@@ -0,0 +1,3 @@
+# HELP test_gauge_metric_1 \n First line.\n Second line.\n
+# TYPE test_gauge_metric_1 gauge
+test_gauge_metric_1{label1="value1"} 11
diff --git a/src/go/collectors/go.d.plugin/pkg/prometheus/testdata/summary-meta.txt b/src/go/collectors/go.d.plugin/pkg/prometheus/testdata/summary-meta.txt
new file mode 100644
index 000000000..3056e8076
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/prometheus/testdata/summary-meta.txt
@@ -0,0 +1,43 @@
+# HELP test_summary_1_duration_microseconds Test Summary Metric 1
+# TYPE test_summary_1_duration_microseconds summary
+test_summary_1_duration_microseconds{label1="value1",quantile="0.5"} 4931.921
+test_summary_1_duration_microseconds{label1="value1",quantile="0.9"} 4932.921
+test_summary_1_duration_microseconds{label1="value1",quantile="0.99"} 4933.921
+test_summary_1_duration_microseconds_sum{label1="value1"} 283201.29
+test_summary_1_duration_microseconds_count{label1="value1"} 31
+test_summary_1_duration_microseconds{label1="value2",quantile="0.5"} 4931.921
+test_summary_1_duration_microseconds{label1="value2",quantile="0.9"} 4932.921
+test_summary_1_duration_microseconds{label1="value2",quantile="0.99"} 4933.921
+test_summary_1_duration_microseconds_sum{label1="value2"} 283201.29
+test_summary_1_duration_microseconds_count{label1="value2"} 31
+test_summary_1_duration_microseconds{label1="value3",quantile="0.5"} 4931.921
+test_summary_1_duration_microseconds{label1="value3",quantile="0.9"} 4932.921
+test_summary_1_duration_microseconds{label1="value3",quantile="0.99"} 4933.921
+test_summary_1_duration_microseconds_sum{label1="value3"} 283201.29
+test_summary_1_duration_microseconds_count{label1="value3"} 31
+test_summary_1_duration_microseconds{label1="value4",quantile="0.5"} 4931.921
+test_summary_1_duration_microseconds{label1="value4",quantile="0.9"} 4932.921
+test_summary_1_duration_microseconds{label1="value4",quantile="0.99"} 4933.921
+test_summary_1_duration_microseconds_sum{label1="value4"} 283201.29
+test_summary_1_duration_microseconds_count{label1="value4"} 31
+# TYPE test_summary_2_duration_microseconds summary
+test_summary_2_duration_microseconds{label1="value1",quantile="0.5"} 5931.921
+test_summary_2_duration_microseconds{label1="value1",quantile="0.9"} 5932.921
+test_summary_2_duration_microseconds{label1="value1",quantile="0.99"} 5933.921
+test_summary_2_duration_microseconds_sum{label1="value1"} 383201.29
+test_summary_2_duration_microseconds_count{label1="value1"} 41
+test_summary_2_duration_microseconds{label1="value2",quantile="0.5"} 5931.921
+test_summary_2_duration_microseconds{label1="value2",quantile="0.9"} 5932.921
+test_summary_2_duration_microseconds{label1="value2",quantile="0.99"} 5933.921
+test_summary_2_duration_microseconds_sum{label1="value2"} 383201.29
+test_summary_2_duration_microseconds_count{label1="value2"} 41
+test_summary_2_duration_microseconds{label1="value3",quantile="0.5"} 5931.921
+test_summary_2_duration_microseconds{label1="value3",quantile="0.9"} 5932.921
+test_summary_2_duration_microseconds{label1="value3",quantile="0.99"} 5933.921
+test_summary_2_duration_microseconds_sum{label1="value3"} 383201.29
+test_summary_2_duration_microseconds_count{label1="value3"} 41
+test_summary_2_duration_microseconds{label1="value4",quantile="0.5"} 5931.921
+test_summary_2_duration_microseconds{label1="value4",quantile="0.9"} 5932.921
+test_summary_2_duration_microseconds{label1="value4",quantile="0.99"} 5933.921
+test_summary_2_duration_microseconds_sum{label1="value4"} 383201.29
+test_summary_2_duration_microseconds_count{label1="value4"} 41
diff --git a/src/go/collectors/go.d.plugin/pkg/prometheus/testdata/summary-no-meta.txt b/src/go/collectors/go.d.plugin/pkg/prometheus/testdata/summary-no-meta.txt
new file mode 100644
index 000000000..e66564bb7
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/prometheus/testdata/summary-no-meta.txt
@@ -0,0 +1,40 @@
+test_summary_no_meta_1_duration_microseconds{label1="value1",quantile="0.5"} 4931.921
+test_summary_no_meta_1_duration_microseconds{label1="value1",quantile="0.9"} 4932.921
+test_summary_no_meta_1_duration_microseconds{label1="value1",quantile="0.99"} 4933.921
+test_summary_no_meta_1_duration_microseconds_sum{label1="value1"} 283201.29
+test_summary_no_meta_1_duration_microseconds_count{label1="value1"} 31
+test_summary_no_meta_1_duration_microseconds{label1="value2",quantile="0.5"} 4931.921
+test_summary_no_meta_1_duration_microseconds{label1="value2",quantile="0.9"} 4932.921
+test_summary_no_meta_1_duration_microseconds{label1="value2",quantile="0.99"} 4933.921
+test_summary_no_meta_1_duration_microseconds_sum{label1="value2"} 283201.29
+test_summary_no_meta_1_duration_microseconds_count{label1="value2"} 31
+test_summary_no_meta_1_duration_microseconds{label1="value3",quantile="0.5"} 4931.921
+test_summary_no_meta_1_duration_microseconds{label1="value3",quantile="0.9"} 4932.921
+test_summary_no_meta_1_duration_microseconds{label1="value3",quantile="0.99"} 4933.921
+test_summary_no_meta_1_duration_microseconds_sum{label1="value3"} 283201.29
+test_summary_no_meta_1_duration_microseconds_count{label1="value3"} 31
+test_summary_no_meta_1_duration_microseconds{label1="value4",quantile="0.5"} 4931.921
+test_summary_no_meta_1_duration_microseconds{label1="value4",quantile="0.9"} 4932.921
+test_summary_no_meta_1_duration_microseconds{label1="value4",quantile="0.99"} 4933.921
+test_summary_no_meta_1_duration_microseconds_sum{label1="value4"} 283201.29
+test_summary_no_meta_1_duration_microseconds_count{label1="value4"} 31
+test_summary_no_meta_2_duration_microseconds{label1="value1",quantile="0.5"} 5931.921
+test_summary_no_meta_2_duration_microseconds{label1="value1",quantile="0.9"} 5932.921
+test_summary_no_meta_2_duration_microseconds{label1="value1",quantile="0.99"} 5933.921
+test_summary_no_meta_2_duration_microseconds_sum{label1="value1"} 383201.29
+test_summary_no_meta_2_duration_microseconds_count{label1="value1"} 41
+test_summary_no_meta_2_duration_microseconds{label1="value2",quantile="0.5"} 5931.921
+test_summary_no_meta_2_duration_microseconds{label1="value2",quantile="0.9"} 5932.921
+test_summary_no_meta_2_duration_microseconds{label1="value2",quantile="0.99"} 5933.921
+test_summary_no_meta_2_duration_microseconds_sum{label1="value2"} 383201.29
+test_summary_no_meta_2_duration_microseconds_count{label1="value2"} 41
+test_summary_no_meta_2_duration_microseconds{label1="value3",quantile="0.5"} 5931.921
+test_summary_no_meta_2_duration_microseconds{label1="value3",quantile="0.9"} 5932.921
+test_summary_no_meta_2_duration_microseconds{label1="value3",quantile="0.99"} 5933.921
+test_summary_no_meta_2_duration_microseconds_sum{label1="value3"} 383201.29
+test_summary_no_meta_2_duration_microseconds_count{label1="value3"} 41
+test_summary_no_meta_2_duration_microseconds{label1="value4",quantile="0.5"} 5931.921
+test_summary_no_meta_2_duration_microseconds{label1="value4",quantile="0.9"} 5932.921
+test_summary_no_meta_2_duration_microseconds{label1="value4",quantile="0.99"} 5933.921
+test_summary_no_meta_2_duration_microseconds_sum{label1="value4"} 383201.29
+test_summary_no_meta_2_duration_microseconds_count{label1="value4"} 41
diff --git a/src/go/collectors/go.d.plugin/pkg/prometheus/testdata/testdata.nometa.txt b/src/go/collectors/go.d.plugin/pkg/prometheus/testdata/testdata.nometa.txt
new file mode 100644
index 000000000..e760ad268
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/prometheus/testdata/testdata.nometa.txt
@@ -0,0 +1,410 @@
+go_gc_duration_seconds{quantile="0"} 4.9351e-05
+go_gc_duration_seconds{quantile="0.25"} 7.424100000000001e-05
+go_gc_duration_seconds{quantile="0.5"} 8.3835e-05
+go_gc_duration_seconds{quantile="0.75"} 0.000106744
+go_gc_duration_seconds{quantile="1"} 0.002072195
+go_gc_duration_seconds_sum 0.012139815
+go_gc_duration_seconds_count 99
+go_goroutines 33
+go_memstats_alloc_bytes 1.7518624e+07
+go_memstats_alloc_bytes_total 8.3062296e+08
+go_memstats_buck_hash_sys_bytes 1.494637e+06
+go_memstats_frees_total 4.65658e+06
+go_memstats_gc_sys_bytes 1.107968e+06
+go_memstats_heap_alloc_bytes 1.7518624e+07
+go_memstats_heap_idle_bytes 6.668288e+06
+go_memstats_heap_inuse_bytes 1.8956288e+07
+go_memstats_heap_objects 72755
+go_memstats_heap_released_bytes_total 0
+go_memstats_heap_sys_bytes 2.5624576e+07
+go_memstats_last_gc_time_seconds 1.4843955586166437e+09
+go_memstats_lookups_total 2089
+go_memstats_mallocs_total 4.729335e+06
+go_memstats_mcache_inuse_bytes 9600
+go_memstats_mcache_sys_bytes 16384
+go_memstats_mspan_inuse_bytes 211520
+go_memstats_mspan_sys_bytes 245760
+go_memstats_next_gc_bytes 2.033527e+07
+go_memstats_other_sys_bytes 2.077323e+06
+go_memstats_stack_inuse_bytes 1.6384e+06
+go_memstats_stack_sys_bytes 1.6384e+06
+go_memstats_sys_bytes 3.2205048e+07
+http_request_duration_microseconds{handler="alerts",quantile="0.5"} NaN
+http_request_duration_microseconds{handler="alerts",quantile="0.9"} NaN
+http_request_duration_microseconds{handler="alerts",quantile="0.99"} NaN
+http_request_duration_microseconds_sum{handler="alerts"} 0
+http_request_duration_microseconds_count{handler="alerts"} 0
+http_request_duration_microseconds{handler="config",quantile="0.5"} NaN
+http_request_duration_microseconds{handler="config",quantile="0.9"} NaN
+http_request_duration_microseconds{handler="config",quantile="0.99"} NaN
+http_request_duration_microseconds_sum{handler="config"} 0
+http_request_duration_microseconds_count{handler="config"} 0
+http_request_duration_microseconds{handler="consoles",quantile="0.5"} NaN
+http_request_duration_microseconds{handler="consoles",quantile="0.9"} NaN
+http_request_duration_microseconds{handler="consoles",quantile="0.99"} NaN
+http_request_duration_microseconds_sum{handler="consoles"} 0
+http_request_duration_microseconds_count{handler="consoles"} 0
+http_request_duration_microseconds{handler="drop_series",quantile="0.5"} NaN
+http_request_duration_microseconds{handler="drop_series",quantile="0.9"} NaN
+http_request_duration_microseconds{handler="drop_series",quantile="0.99"} NaN
+http_request_duration_microseconds_sum{handler="drop_series"} 0
+http_request_duration_microseconds_count{handler="drop_series"} 0
+http_request_duration_microseconds{handler="federate",quantile="0.5"} NaN
+http_request_duration_microseconds{handler="federate",quantile="0.9"} NaN
+http_request_duration_microseconds{handler="federate",quantile="0.99"} NaN
+http_request_duration_microseconds_sum{handler="federate"} 0
+http_request_duration_microseconds_count{handler="federate"} 0
+http_request_duration_microseconds{handler="flags",quantile="0.5"} NaN
+http_request_duration_microseconds{handler="flags",quantile="0.9"} NaN
+http_request_duration_microseconds{handler="flags",quantile="0.99"} NaN
+http_request_duration_microseconds_sum{handler="flags"} 0
+http_request_duration_microseconds_count{handler="flags"} 0
+http_request_duration_microseconds{handler="graph",quantile="0.5"} 771.655
+http_request_duration_microseconds{handler="graph",quantile="0.9"} 1761.823
+http_request_duration_microseconds{handler="graph",quantile="0.99"} 1761.823
+http_request_duration_microseconds_sum{handler="graph"} 5803.93
+http_request_duration_microseconds_count{handler="graph"} 3
+http_request_duration_microseconds{handler="heap",quantile="0.5"} NaN
+http_request_duration_microseconds{handler="heap",quantile="0.9"} NaN
+http_request_duration_microseconds{handler="heap",quantile="0.99"} NaN
+http_request_duration_microseconds_sum{handler="heap"} 0
+http_request_duration_microseconds_count{handler="heap"} 0
+http_request_duration_microseconds{handler="label_values",quantile="0.5"} 325.401
+http_request_duration_microseconds{handler="label_values",quantile="0.9"} 414.708
+http_request_duration_microseconds{handler="label_values",quantile="0.99"} 414.708
+http_request_duration_microseconds_sum{handler="label_values"} 3995.574
+http_request_duration_microseconds_count{handler="label_values"} 3
+http_request_duration_microseconds{handler="options",quantile="0.5"} NaN
+http_request_duration_microseconds{handler="options",quantile="0.9"} NaN
+http_request_duration_microseconds{handler="options",quantile="0.99"} NaN
+http_request_duration_microseconds_sum{handler="options"} 0
+http_request_duration_microseconds_count{handler="options"} 0
+http_request_duration_microseconds{handler="prometheus",quantile="0.5"} 1351.859
+http_request_duration_microseconds{handler="prometheus",quantile="0.9"} 1714.035
+http_request_duration_microseconds{handler="prometheus",quantile="0.99"} 2833.523
+http_request_duration_microseconds_sum{handler="prometheus"} 661851.54
+http_request_duration_microseconds_count{handler="prometheus"} 462
+http_request_duration_microseconds{handler="query",quantile="0.5"} 3885.448
+http_request_duration_microseconds{handler="query",quantile="0.9"} 4390.558
+http_request_duration_microseconds{handler="query",quantile="0.99"} 4390.558
+http_request_duration_microseconds_sum{handler="query"} 26074.11
+http_request_duration_microseconds_count{handler="query"} 6
+http_request_duration_microseconds{handler="query_range",quantile="0.5"} NaN
+http_request_duration_microseconds{handler="query_range",quantile="0.9"} NaN
+http_request_duration_microseconds{handler="query_range",quantile="0.99"} NaN
+http_request_duration_microseconds_sum{handler="query_range"} 0
+http_request_duration_microseconds_count{handler="query_range"} 0
+http_request_duration_microseconds{handler="rules",quantile="0.5"} NaN
+http_request_duration_microseconds{handler="rules",quantile="0.9"} NaN
+http_request_duration_microseconds{handler="rules",quantile="0.99"} NaN
+http_request_duration_microseconds_sum{handler="rules"} 0
+http_request_duration_microseconds_count{handler="rules"} 0
+http_request_duration_microseconds{handler="series",quantile="0.5"} NaN
+http_request_duration_microseconds{handler="series",quantile="0.9"} NaN
+http_request_duration_microseconds{handler="series",quantile="0.99"} NaN
+http_request_duration_microseconds_sum{handler="series"} 0
+http_request_duration_microseconds_count{handler="series"} 0
+http_request_duration_microseconds{handler="static",quantile="0.5"} 212.311
+http_request_duration_microseconds{handler="static",quantile="0.9"} 265.174
+http_request_duration_microseconds{handler="static",quantile="0.99"} 265.174
+http_request_duration_microseconds_sum{handler="static"} 6458.621
+http_request_duration_microseconds_count{handler="static"} 3
+http_request_duration_microseconds{handler="status",quantile="0.5"} NaN
+http_request_duration_microseconds{handler="status",quantile="0.9"} NaN
+http_request_duration_microseconds{handler="status",quantile="0.99"} NaN
+http_request_duration_microseconds_sum{handler="status"} 0
+http_request_duration_microseconds_count{handler="status"} 0
+http_request_duration_microseconds{handler="targets",quantile="0.5"} NaN
+http_request_duration_microseconds{handler="targets",quantile="0.9"} NaN
+http_request_duration_microseconds{handler="targets",quantile="0.99"} NaN
+http_request_duration_microseconds_sum{handler="targets"} 0
+http_request_duration_microseconds_count{handler="targets"} 0
+http_request_duration_microseconds{handler="version",quantile="0.5"} NaN
+http_request_duration_microseconds{handler="version",quantile="0.9"} NaN
+http_request_duration_microseconds{handler="version",quantile="0.99"} NaN
+http_request_duration_microseconds_sum{handler="version"} 0
+http_request_duration_microseconds_count{handler="version"} 0
+http_request_size_bytes{handler="alerts",quantile="0.5"} NaN
+http_request_size_bytes{handler="alerts",quantile="0.9"} NaN
+http_request_size_bytes{handler="alerts",quantile="0.99"} NaN
+http_request_size_bytes_sum{handler="alerts"} 0
+http_request_size_bytes_count{handler="alerts"} 0
+http_request_size_bytes{handler="config",quantile="0.5"} NaN
+http_request_size_bytes{handler="config",quantile="0.9"} NaN
+http_request_size_bytes{handler="config",quantile="0.99"} NaN
+http_request_size_bytes_sum{handler="config"} 0
+http_request_size_bytes_count{handler="config"} 0
+http_request_size_bytes{handler="consoles",quantile="0.5"} NaN
+http_request_size_bytes{handler="consoles",quantile="0.9"} NaN
+http_request_size_bytes{handler="consoles",quantile="0.99"} NaN
+http_request_size_bytes_sum{handler="consoles"} 0
+http_request_size_bytes_count{handler="consoles"} 0
+http_request_size_bytes{handler="drop_series",quantile="0.5"} NaN
+http_request_size_bytes{handler="drop_series",quantile="0.9"} NaN
+http_request_size_bytes{handler="drop_series",quantile="0.99"} NaN
+http_request_size_bytes_sum{handler="drop_series"} 0
+http_request_size_bytes_count{handler="drop_series"} 0
+http_request_size_bytes{handler="federate",quantile="0.5"} NaN
+http_request_size_bytes{handler="federate",quantile="0.9"} NaN
+http_request_size_bytes{handler="federate",quantile="0.99"} NaN
+http_request_size_bytes_sum{handler="federate"} 0
+http_request_size_bytes_count{handler="federate"} 0
+http_request_size_bytes{handler="flags",quantile="0.5"} NaN
+http_request_size_bytes{handler="flags",quantile="0.9"} NaN
+http_request_size_bytes{handler="flags",quantile="0.99"} NaN
+http_request_size_bytes_sum{handler="flags"} 0
+http_request_size_bytes_count{handler="flags"} 0
+http_request_size_bytes{handler="graph",quantile="0.5"} 367
+http_request_size_bytes{handler="graph",quantile="0.9"} 389
+http_request_size_bytes{handler="graph",quantile="0.99"} 389
+http_request_size_bytes_sum{handler="graph"} 1145
+http_request_size_bytes_count{handler="graph"} 3
+http_request_size_bytes{handler="heap",quantile="0.5"} NaN
+http_request_size_bytes{handler="heap",quantile="0.9"} NaN
+http_request_size_bytes{handler="heap",quantile="0.99"} NaN
+http_request_size_bytes_sum{handler="heap"} 0
+http_request_size_bytes_count{handler="heap"} 0
+http_request_size_bytes{handler="label_values",quantile="0.5"} 416
+http_request_size_bytes{handler="label_values",quantile="0.9"} 416
+http_request_size_bytes{handler="label_values",quantile="0.99"} 416
+http_request_size_bytes_sum{handler="label_values"} 1248
+http_request_size_bytes_count{handler="label_values"} 3
+http_request_size_bytes{handler="options",quantile="0.5"} NaN
+http_request_size_bytes{handler="options",quantile="0.9"} NaN
+http_request_size_bytes{handler="options",quantile="0.99"} NaN
+http_request_size_bytes_sum{handler="options"} 0
+http_request_size_bytes_count{handler="options"} 0
+http_request_size_bytes{handler="prometheus",quantile="0.5"} 238
+http_request_size_bytes{handler="prometheus",quantile="0.9"} 238
+http_request_size_bytes{handler="prometheus",quantile="0.99"} 238
+http_request_size_bytes_sum{handler="prometheus"} 109956
+http_request_size_bytes_count{handler="prometheus"} 462
+http_request_size_bytes{handler="query",quantile="0.5"} 531
+http_request_size_bytes{handler="query",quantile="0.9"} 531
+http_request_size_bytes{handler="query",quantile="0.99"} 531
+http_request_size_bytes_sum{handler="query"} 3186
+http_request_size_bytes_count{handler="query"} 6
+http_request_size_bytes{handler="query_range",quantile="0.5"} NaN
+http_request_size_bytes{handler="query_range",quantile="0.9"} NaN
+http_request_size_bytes{handler="query_range",quantile="0.99"} NaN
+http_request_size_bytes_sum{handler="query_range"} 0
+http_request_size_bytes_count{handler="query_range"} 0
+http_request_size_bytes{handler="rules",quantile="0.5"} NaN
+http_request_size_bytes{handler="rules",quantile="0.9"} NaN
+http_request_size_bytes{handler="rules",quantile="0.99"} NaN
+http_request_size_bytes_sum{handler="rules"} 0
+http_request_size_bytes_count{handler="rules"} 0
+http_request_size_bytes{handler="series",quantile="0.5"} NaN
+http_request_size_bytes{handler="series",quantile="0.9"} NaN
+http_request_size_bytes{handler="series",quantile="0.99"} NaN
+http_request_size_bytes_sum{handler="series"} 0
+http_request_size_bytes_count{handler="series"} 0
+http_request_size_bytes{handler="static",quantile="0.5"} 379
+http_request_size_bytes{handler="static",quantile="0.9"} 379
+http_request_size_bytes{handler="static",quantile="0.99"} 379
+http_request_size_bytes_sum{handler="static"} 1137
+http_request_size_bytes_count{handler="static"} 3
+http_request_size_bytes{handler="status",quantile="0.5"} NaN
+http_request_size_bytes{handler="status",quantile="0.9"} NaN
+http_request_size_bytes{handler="status",quantile="0.99"} NaN
+http_request_size_bytes_sum{handler="status"} 0
+http_request_size_bytes_count{handler="status"} 0
+http_request_size_bytes{handler="targets",quantile="0.5"} NaN
+http_request_size_bytes{handler="targets",quantile="0.9"} NaN
+http_request_size_bytes{handler="targets",quantile="0.99"} NaN
+http_request_size_bytes_sum{handler="targets"} 0
+http_request_size_bytes_count{handler="targets"} 0
+http_request_size_bytes{handler="version",quantile="0.5"} NaN
+http_request_size_bytes{handler="version",quantile="0.9"} NaN
+http_request_size_bytes{handler="version",quantile="0.99"} NaN
+http_request_size_bytes_sum{handler="version"} 0
+http_request_size_bytes_count{handler="version"} 0
+http_requests_total{code="200",handler="graph",method="get"} 3
+http_requests_total{code="200",handler="label_values",method="get"} 3
+http_requests_total{code="200",handler="prometheus",method="get"} 462
+http_requests_total{code="200",handler="query",method="get"} 6
+http_requests_total{code="200",handler="static",method="get"} 3
+http_response_size_bytes{handler="alerts",quantile="0.5"} NaN
+http_response_size_bytes{handler="alerts",quantile="0.9"} NaN
+http_response_size_bytes{handler="alerts",quantile="0.99"} NaN
+http_response_size_bytes_sum{handler="alerts"} 0
+http_response_size_bytes_count{handler="alerts"} 0
+http_response_size_bytes{handler="config",quantile="0.5"} NaN
+http_response_size_bytes{handler="config",quantile="0.9"} NaN
+http_response_size_bytes{handler="config",quantile="0.99"} NaN
+http_response_size_bytes_sum{handler="config"} 0
+http_response_size_bytes_count{handler="config"} 0
+http_response_size_bytes{handler="consoles",quantile="0.5"} NaN
+http_response_size_bytes{handler="consoles",quantile="0.9"} NaN
+http_response_size_bytes{handler="consoles",quantile="0.99"} NaN
+http_response_size_bytes_sum{handler="consoles"} 0
+http_response_size_bytes_count{handler="consoles"} 0
+http_response_size_bytes{handler="drop_series",quantile="0.5"} NaN
+http_response_size_bytes{handler="drop_series",quantile="0.9"} NaN
+http_response_size_bytes{handler="drop_series",quantile="0.99"} NaN
+http_response_size_bytes_sum{handler="drop_series"} 0
+http_response_size_bytes_count{handler="drop_series"} 0
+http_response_size_bytes{handler="federate",quantile="0.5"} NaN
+http_response_size_bytes{handler="federate",quantile="0.9"} NaN
+http_response_size_bytes{handler="federate",quantile="0.99"} NaN
+http_response_size_bytes_sum{handler="federate"} 0
+http_response_size_bytes_count{handler="federate"} 0
+http_response_size_bytes{handler="flags",quantile="0.5"} NaN
+http_response_size_bytes{handler="flags",quantile="0.9"} NaN
+http_response_size_bytes{handler="flags",quantile="0.99"} NaN
+http_response_size_bytes_sum{handler="flags"} 0
+http_response_size_bytes_count{handler="flags"} 0
+http_response_size_bytes{handler="graph",quantile="0.5"} 3619
+http_response_size_bytes{handler="graph",quantile="0.9"} 3619
+http_response_size_bytes{handler="graph",quantile="0.99"} 3619
+http_response_size_bytes_sum{handler="graph"} 10857
+http_response_size_bytes_count{handler="graph"} 3
+http_response_size_bytes{handler="heap",quantile="0.5"} NaN
+http_response_size_bytes{handler="heap",quantile="0.9"} NaN
+http_response_size_bytes{handler="heap",quantile="0.99"} NaN
+http_response_size_bytes_sum{handler="heap"} 0
+http_response_size_bytes_count{handler="heap"} 0
+http_response_size_bytes{handler="label_values",quantile="0.5"} 642
+http_response_size_bytes{handler="label_values",quantile="0.9"} 642
+http_response_size_bytes{handler="label_values",quantile="0.99"} 642
+http_response_size_bytes_sum{handler="label_values"} 1926
+http_response_size_bytes_count{handler="label_values"} 3
+http_response_size_bytes{handler="options",quantile="0.5"} NaN
+http_response_size_bytes{handler="options",quantile="0.9"} NaN
+http_response_size_bytes{handler="options",quantile="0.99"} NaN
+http_response_size_bytes_sum{handler="options"} 0
+http_response_size_bytes_count{handler="options"} 0
+http_response_size_bytes{handler="prometheus",quantile="0.5"} 3033
+http_response_size_bytes{handler="prometheus",quantile="0.9"} 3123
+http_response_size_bytes{handler="prometheus",quantile="0.99"} 3128
+http_response_size_bytes_sum{handler="prometheus"} 1.374097e+06
+http_response_size_bytes_count{handler="prometheus"} 462
+http_response_size_bytes{handler="query",quantile="0.5"} 776
+http_response_size_bytes{handler="query",quantile="0.9"} 781
+http_response_size_bytes{handler="query",quantile="0.99"} 781
+http_response_size_bytes_sum{handler="query"} 4656
+http_response_size_bytes_count{handler="query"} 6
+http_response_size_bytes{handler="query_range",quantile="0.5"} NaN
+http_response_size_bytes{handler="query_range",quantile="0.9"} NaN
+http_response_size_bytes{handler="query_range",quantile="0.99"} NaN
+http_response_size_bytes_sum{handler="query_range"} 0
+http_response_size_bytes_count{handler="query_range"} 0
+http_response_size_bytes{handler="rules",quantile="0.5"} NaN
+http_response_size_bytes{handler="rules",quantile="0.9"} NaN
+http_response_size_bytes{handler="rules",quantile="0.99"} NaN
+http_response_size_bytes_sum{handler="rules"} 0
+http_response_size_bytes_count{handler="rules"} 0
+http_response_size_bytes{handler="series",quantile="0.5"} NaN
+http_response_size_bytes{handler="series",quantile="0.9"} NaN
+http_response_size_bytes{handler="series",quantile="0.99"} NaN
+http_response_size_bytes_sum{handler="series"} 0
+http_response_size_bytes_count{handler="series"} 0
+http_response_size_bytes{handler="static",quantile="0.5"} 6316
+http_response_size_bytes{handler="static",quantile="0.9"} 6316
+http_response_size_bytes{handler="static",quantile="0.99"} 6316
+http_response_size_bytes_sum{handler="static"} 18948
+http_response_size_bytes_count{handler="static"} 3
+http_response_size_bytes{handler="status",quantile="0.5"} NaN
+http_response_size_bytes{handler="status",quantile="0.9"} NaN
+http_response_size_bytes{handler="status",quantile="0.99"} NaN
+http_response_size_bytes_sum{handler="status"} 0
+http_response_size_bytes_count{handler="status"} 0
+http_response_size_bytes{handler="targets",quantile="0.5"} NaN
+http_response_size_bytes{handler="targets",quantile="0.9"} NaN
+http_response_size_bytes{handler="targets",quantile="0.99"} NaN
+http_response_size_bytes_sum{handler="targets"} 0
+http_response_size_bytes_count{handler="targets"} 0
+http_response_size_bytes{handler="version",quantile="0.5"} NaN
+http_response_size_bytes{handler="version",quantile="0.9"} NaN
+http_response_size_bytes{handler="version",quantile="0.99"} NaN
+http_response_size_bytes_sum{handler="version"} 0
+http_response_size_bytes_count{handler="version"} 0
+prometheus_build_info{branch="",goversion="go1.7.3",revision="",version=""} 1
+prometheus_config_last_reload_success_timestamp_seconds 1.484395547e+09
+prometheus_config_last_reload_successful 1
+prometheus_evaluator_duration_seconds{quantile="0.01"} 1.7890000000000002e-06
+prometheus_evaluator_duration_seconds{quantile="0.05"} 1.7890000000000002e-06
+prometheus_evaluator_duration_seconds{quantile="0.5"} 1.7890000000000002e-06
+prometheus_evaluator_duration_seconds{quantile="0.9"} 1.7890000000000002e-06
+prometheus_evaluator_duration_seconds{quantile="0.99"} 1.7890000000000002e-06
+prometheus_evaluator_duration_seconds_sum 1.7890000000000002e-06
+prometheus_evaluator_duration_seconds_count 1
+prometheus_evaluator_iterations_skipped_total 0
+prometheus_notifications_dropped_total 0
+prometheus_notifications_queue_capacity 10000
+prometheus_notifications_queue_length 0
+prometheus_rule_evaluation_failures_total{rule_type="alerting"} 0
+prometheus_rule_evaluation_failures_total{rule_type="recording"} 0
+prometheus_sd_azure_refresh_duration_seconds{quantile="0.5"} NaN
+prometheus_sd_azure_refresh_duration_seconds{quantile="0.9"} NaN
+prometheus_sd_azure_refresh_duration_seconds{quantile="0.99"} NaN
+prometheus_sd_azure_refresh_duration_seconds_sum 0
+prometheus_sd_azure_refresh_duration_seconds_count 0
+prometheus_sd_azure_refresh_failures_total 0
+prometheus_sd_consul_rpc_duration_seconds{call="service",endpoint="catalog",quantile="0.5"} NaN
+prometheus_sd_consul_rpc_duration_seconds{call="service",endpoint="catalog",quantile="0.9"} NaN
+prometheus_sd_consul_rpc_duration_seconds{call="service",endpoint="catalog",quantile="0.99"} NaN
+prometheus_sd_consul_rpc_duration_seconds_sum{call="service",endpoint="catalog"} 0
+prometheus_sd_consul_rpc_duration_seconds_count{call="service",endpoint="catalog"} 0
+prometheus_sd_consul_rpc_duration_seconds{call="services",endpoint="catalog",quantile="0.5"} NaN
+prometheus_sd_consul_rpc_duration_seconds{call="services",endpoint="catalog",quantile="0.9"} NaN
+prometheus_sd_consul_rpc_duration_seconds{call="services",endpoint="catalog",quantile="0.99"} NaN
+prometheus_sd_consul_rpc_duration_seconds_sum{call="services",endpoint="catalog"} 0
+prometheus_sd_consul_rpc_duration_seconds_count{call="services",endpoint="catalog"} 0
+prometheus_sd_consul_rpc_failures_total 0
+prometheus_sd_dns_lookup_failures_total 0
+prometheus_sd_dns_lookups_total 0
+prometheus_sd_ec2_refresh_duration_seconds{quantile="0.5"} NaN
+prometheus_sd_ec2_refresh_duration_seconds{quantile="0.9"} NaN
+prometheus_sd_ec2_refresh_duration_seconds{quantile="0.99"} NaN
+prometheus_sd_ec2_refresh_duration_seconds_sum 0
+prometheus_sd_ec2_refresh_duration_seconds_count 0
+prometheus_sd_ec2_refresh_failures_total 0
+prometheus_sd_file_read_errors_total 0
+prometheus_sd_file_scan_duration_seconds{quantile="0.5"} NaN
+prometheus_sd_file_scan_duration_seconds{quantile="0.9"} NaN
+prometheus_sd_file_scan_duration_seconds{quantile="0.99"} NaN
+prometheus_sd_file_scan_duration_seconds_sum 0
+prometheus_sd_file_scan_duration_seconds_count 0
+prometheus_sd_gce_refresh_duration{quantile="0.5"} NaN
+prometheus_sd_gce_refresh_duration{quantile="0.9"} NaN
+prometheus_sd_gce_refresh_duration{quantile="0.99"} NaN
+prometheus_sd_gce_refresh_duration_sum 0
+prometheus_sd_gce_refresh_duration_count 0
+prometheus_sd_gce_refresh_failures_total 0
+prometheus_sd_kubernetes_events_total{event="add",role="endpoints"} 0
+prometheus_sd_kubernetes_events_total{event="add",role="node"} 0
+prometheus_sd_kubernetes_events_total{event="add",role="pod"} 0
+prometheus_sd_kubernetes_events_total{event="add",role="service"} 0
+prometheus_sd_kubernetes_events_total{event="delete",role="endpoints"} 0
+prometheus_sd_kubernetes_events_total{event="delete",role="node"} 0
+prometheus_sd_kubernetes_events_total{event="delete",role="pod"} 0
+prometheus_sd_kubernetes_events_total{event="delete",role="service"} 0
+prometheus_sd_kubernetes_events_total{event="update",role="endpoints"} 0
+prometheus_sd_kubernetes_events_total{event="update",role="node"} 0
+prometheus_sd_kubernetes_events_total{event="update",role="pod"} 0
+prometheus_sd_kubernetes_events_total{event="update",role="service"} 0
+prometheus_sd_marathon_refresh_duration_seconds{quantile="0.5"} NaN
+prometheus_sd_marathon_refresh_duration_seconds{quantile="0.9"} NaN
+prometheus_sd_marathon_refresh_duration_seconds{quantile="0.99"} NaN
+prometheus_sd_marathon_refresh_duration_seconds_sum 0
+prometheus_sd_marathon_refresh_duration_seconds_count 0
+prometheus_sd_marathon_refresh_failures_total 0
+prometheus_target_interval_length_seconds{interval="50ms",quantile="0.01"} 0.046182157
+prometheus_target_interval_length_seconds{interval="50ms",quantile="0.05"} 0.047306979000000006
+prometheus_target_interval_length_seconds{interval="50ms",quantile="0.5"} 0.050381782
+prometheus_target_interval_length_seconds{interval="50ms",quantile="0.9"} 0.052614556
+prometheus_target_interval_length_seconds{interval="50ms",quantile="0.99"} 0.054404386000000006
+prometheus_target_interval_length_seconds_sum{interval="50ms"} 34.512091221999995
+prometheus_target_interval_length_seconds_count{interval="50ms"} 685
+prometheus_target_scrape_pool_sync_total{scrape_job="prometheus"} 1
+prometheus_target_skipped_scrapes_total 0
+prometheus_target_sync_length_seconds{scrape_job="prometheus",quantile="0.01"} 0.00020043300000000002
+prometheus_target_sync_length_seconds{scrape_job="prometheus",quantile="0.05"} 0.00020043300000000002
+prometheus_target_sync_length_seconds{scrape_job="prometheus",quantile="0.5"} 0.00020043300000000002
+prometheus_target_sync_length_seconds{scrape_job="prometheus",quantile="0.9"} 0.00020043300000000002
+prometheus_target_sync_length_seconds{scrape_job="prometheus",quantile="0.99"} 0.00020043300000000002
+prometheus_target_sync_length_seconds_sum{scrape_job="prometheus"} 0.00020043300000000002
+prometheus_target_sync_length_seconds_count{scrape_job="prometheus"} 1
+prometheus_treecache_watcher_goroutines 0
+prometheus_treecache_zookeeper_failures_total 0
diff --git a/src/go/collectors/go.d.plugin/pkg/prometheus/testdata/testdata.txt b/src/go/collectors/go.d.plugin/pkg/prometheus/testdata/testdata.txt
new file mode 100644
index 000000000..c7f2a7af0
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/prometheus/testdata/testdata.txt
@@ -0,0 +1,528 @@
+# HELP go_gc_duration_seconds A summary of the GC invocation durations.
+# TYPE go_gc_duration_seconds summary
+go_gc_duration_seconds{quantile="0"} 4.9351e-05
+go_gc_duration_seconds{quantile="0.25"} 7.424100000000001e-05
+go_gc_duration_seconds{quantile="0.5"} 8.3835e-05
+go_gc_duration_seconds{quantile="0.75"} 0.000106744
+go_gc_duration_seconds{quantile="1"} 0.002072195
+go_gc_duration_seconds_sum 0.012139815
+go_gc_duration_seconds_count 99
+# HELP go_goroutines Number of goroutines that currently exist.
+# TYPE go_goroutines gauge
+go_goroutines 33
+# HELP go_memstats_alloc_bytes Number of bytes allocated and still in use.
+# TYPE go_memstats_alloc_bytes gauge
+go_memstats_alloc_bytes 1.7518624e+07
+# HELP go_memstats_alloc_bytes_total Total number of bytes allocated, even if freed.
+# TYPE go_memstats_alloc_bytes_total counter
+go_memstats_alloc_bytes_total 8.3062296e+08
+# HELP go_memstats_buck_hash_sys_bytes Number of bytes used by the profiling bucket hash table.
+# TYPE go_memstats_buck_hash_sys_bytes gauge
+go_memstats_buck_hash_sys_bytes 1.494637e+06
+# HELP go_memstats_frees_total Total number of frees.
+# TYPE go_memstats_frees_total counter
+go_memstats_frees_total 4.65658e+06
+# HELP go_memstats_gc_sys_bytes Number of bytes used for garbage collection system metadata.
+# TYPE go_memstats_gc_sys_bytes gauge
+go_memstats_gc_sys_bytes 1.107968e+06
+# HELP go_memstats_heap_alloc_bytes Number of heap bytes allocated and still in use.
+# TYPE go_memstats_heap_alloc_bytes gauge
+go_memstats_heap_alloc_bytes 1.7518624e+07
+# HELP go_memstats_heap_idle_bytes Number of heap bytes waiting to be used.
+# TYPE go_memstats_heap_idle_bytes gauge
+go_memstats_heap_idle_bytes 6.668288e+06
+# HELP go_memstats_heap_inuse_bytes Number of heap bytes that are in use.
+# TYPE go_memstats_heap_inuse_bytes gauge
+go_memstats_heap_inuse_bytes 1.8956288e+07
+# HELP go_memstats_heap_objects Number of allocated objects.
+# TYPE go_memstats_heap_objects gauge
+go_memstats_heap_objects 72755
+# HELP go_memstats_heap_released_bytes_total Total number of heap bytes released to OS.
+# TYPE go_memstats_heap_released_bytes_total counter
+go_memstats_heap_released_bytes_total 0
+# HELP go_memstats_heap_sys_bytes Number of heap bytes obtained from system.
+# TYPE go_memstats_heap_sys_bytes gauge
+go_memstats_heap_sys_bytes 2.5624576e+07
+# HELP go_memstats_last_gc_time_seconds Number of seconds since 1970 of last garbage collection.
+# TYPE go_memstats_last_gc_time_seconds gauge
+go_memstats_last_gc_time_seconds 1.4843955586166437e+09
+# HELP go_memstats_lookups_total Total number of pointer lookups.
+# TYPE go_memstats_lookups_total counter
+go_memstats_lookups_total 2089
+# HELP go_memstats_mallocs_total Total number of mallocs.
+# TYPE go_memstats_mallocs_total counter
+go_memstats_mallocs_total 4.729335e+06
+# HELP go_memstats_mcache_inuse_bytes Number of bytes in use by mcache structures.
+# TYPE go_memstats_mcache_inuse_bytes gauge
+go_memstats_mcache_inuse_bytes 9600
+# HELP go_memstats_mcache_sys_bytes Number of bytes used for mcache structures obtained from system.
+# TYPE go_memstats_mcache_sys_bytes gauge
+go_memstats_mcache_sys_bytes 16384
+# HELP go_memstats_mspan_inuse_bytes Number of bytes in use by mspan structures.
+# TYPE go_memstats_mspan_inuse_bytes gauge
+go_memstats_mspan_inuse_bytes 211520
+# HELP go_memstats_mspan_sys_bytes Number of bytes used for mspan structures obtained from system.
+# TYPE go_memstats_mspan_sys_bytes gauge
+go_memstats_mspan_sys_bytes 245760
+# HELP go_memstats_next_gc_bytes Number of heap bytes when next garbage collection will take place.
+# TYPE go_memstats_next_gc_bytes gauge
+go_memstats_next_gc_bytes 2.033527e+07
+# HELP go_memstats_other_sys_bytes Number of bytes used for other system allocations.
+# TYPE go_memstats_other_sys_bytes gauge
+go_memstats_other_sys_bytes 2.077323e+06
+# HELP go_memstats_stack_inuse_bytes Number of bytes in use by the stack allocator.
+# TYPE go_memstats_stack_inuse_bytes gauge
+go_memstats_stack_inuse_bytes 1.6384e+06
+# HELP go_memstats_stack_sys_bytes Number of bytes obtained from system for stack allocator.
+# TYPE go_memstats_stack_sys_bytes gauge
+go_memstats_stack_sys_bytes 1.6384e+06
+# HELP go_memstats_sys_bytes Number of bytes obtained by system. Sum of all system allocations.
+# TYPE go_memstats_sys_bytes gauge
+go_memstats_sys_bytes 3.2205048e+07
+# HELP http_request_duration_microseconds The HTTP request latencies in microseconds.
+# TYPE http_request_duration_microseconds summary
+http_request_duration_microseconds{handler="alerts",quantile="0.5"} NaN
+http_request_duration_microseconds{handler="alerts",quantile="0.9"} NaN
+http_request_duration_microseconds{handler="alerts",quantile="0.99"} NaN
+http_request_duration_microseconds_sum{handler="alerts"} 0
+http_request_duration_microseconds_count{handler="alerts"} 0
+http_request_duration_microseconds{handler="config",quantile="0.5"} NaN
+http_request_duration_microseconds{handler="config",quantile="0.9"} NaN
+http_request_duration_microseconds{handler="config",quantile="0.99"} NaN
+http_request_duration_microseconds_sum{handler="config"} 0
+http_request_duration_microseconds_count{handler="config"} 0
+http_request_duration_microseconds{handler="consoles",quantile="0.5"} NaN
+http_request_duration_microseconds{handler="consoles",quantile="0.9"} NaN
+http_request_duration_microseconds{handler="consoles",quantile="0.99"} NaN
+http_request_duration_microseconds_sum{handler="consoles"} 0
+http_request_duration_microseconds_count{handler="consoles"} 0
+http_request_duration_microseconds{handler="drop_series",quantile="0.5"} NaN
+http_request_duration_microseconds{handler="drop_series",quantile="0.9"} NaN
+http_request_duration_microseconds{handler="drop_series",quantile="0.99"} NaN
+http_request_duration_microseconds_sum{handler="drop_series"} 0
+http_request_duration_microseconds_count{handler="drop_series"} 0
+http_request_duration_microseconds{handler="federate",quantile="0.5"} NaN
+http_request_duration_microseconds{handler="federate",quantile="0.9"} NaN
+http_request_duration_microseconds{handler="federate",quantile="0.99"} NaN
+http_request_duration_microseconds_sum{handler="federate"} 0
+http_request_duration_microseconds_count{handler="federate"} 0
+http_request_duration_microseconds{handler="flags",quantile="0.5"} NaN
+http_request_duration_microseconds{handler="flags",quantile="0.9"} NaN
+http_request_duration_microseconds{handler="flags",quantile="0.99"} NaN
+http_request_duration_microseconds_sum{handler="flags"} 0
+http_request_duration_microseconds_count{handler="flags"} 0
+http_request_duration_microseconds{handler="graph",quantile="0.5"} 771.655
+http_request_duration_microseconds{handler="graph",quantile="0.9"} 1761.823
+http_request_duration_microseconds{handler="graph",quantile="0.99"} 1761.823
+http_request_duration_microseconds_sum{handler="graph"} 5803.93
+http_request_duration_microseconds_count{handler="graph"} 3
+http_request_duration_microseconds{handler="heap",quantile="0.5"} NaN
+http_request_duration_microseconds{handler="heap",quantile="0.9"} NaN
+http_request_duration_microseconds{handler="heap",quantile="0.99"} NaN
+http_request_duration_microseconds_sum{handler="heap"} 0
+http_request_duration_microseconds_count{handler="heap"} 0
+http_request_duration_microseconds{handler="label_values",quantile="0.5"} 325.401
+http_request_duration_microseconds{handler="label_values",quantile="0.9"} 414.708
+http_request_duration_microseconds{handler="label_values",quantile="0.99"} 414.708
+http_request_duration_microseconds_sum{handler="label_values"} 3995.574
+http_request_duration_microseconds_count{handler="label_values"} 3
+http_request_duration_microseconds{handler="options",quantile="0.5"} NaN
+http_request_duration_microseconds{handler="options",quantile="0.9"} NaN
+http_request_duration_microseconds{handler="options",quantile="0.99"} NaN
+http_request_duration_microseconds_sum{handler="options"} 0
+http_request_duration_microseconds_count{handler="options"} 0
+http_request_duration_microseconds{handler="prometheus",quantile="0.5"} 1351.859
+http_request_duration_microseconds{handler="prometheus",quantile="0.9"} 1714.035
+http_request_duration_microseconds{handler="prometheus",quantile="0.99"} 2833.523
+http_request_duration_microseconds_sum{handler="prometheus"} 661851.54
+http_request_duration_microseconds_count{handler="prometheus"} 462
+http_request_duration_microseconds{handler="query",quantile="0.5"} 3885.448
+http_request_duration_microseconds{handler="query",quantile="0.9"} 4390.558
+http_request_duration_microseconds{handler="query",quantile="0.99"} 4390.558
+http_request_duration_microseconds_sum{handler="query"} 26074.11
+http_request_duration_microseconds_count{handler="query"} 6
+http_request_duration_microseconds{handler="query_range",quantile="0.5"} NaN
+http_request_duration_microseconds{handler="query_range",quantile="0.9"} NaN
+http_request_duration_microseconds{handler="query_range",quantile="0.99"} NaN
+http_request_duration_microseconds_sum{handler="query_range"} 0
+http_request_duration_microseconds_count{handler="query_range"} 0
+http_request_duration_microseconds{handler="rules",quantile="0.5"} NaN
+http_request_duration_microseconds{handler="rules",quantile="0.9"} NaN
+http_request_duration_microseconds{handler="rules",quantile="0.99"} NaN
+http_request_duration_microseconds_sum{handler="rules"} 0
+http_request_duration_microseconds_count{handler="rules"} 0
+http_request_duration_microseconds{handler="series",quantile="0.5"} NaN
+http_request_duration_microseconds{handler="series",quantile="0.9"} NaN
+http_request_duration_microseconds{handler="series",quantile="0.99"} NaN
+http_request_duration_microseconds_sum{handler="series"} 0
+http_request_duration_microseconds_count{handler="series"} 0
+http_request_duration_microseconds{handler="static",quantile="0.5"} 212.311
+http_request_duration_microseconds{handler="static",quantile="0.9"} 265.174
+http_request_duration_microseconds{handler="static",quantile="0.99"} 265.174
+http_request_duration_microseconds_sum{handler="static"} 6458.621
+http_request_duration_microseconds_count{handler="static"} 3
+http_request_duration_microseconds{handler="status",quantile="0.5"} NaN
+http_request_duration_microseconds{handler="status",quantile="0.9"} NaN
+http_request_duration_microseconds{handler="status",quantile="0.99"} NaN
+http_request_duration_microseconds_sum{handler="status"} 0
+http_request_duration_microseconds_count{handler="status"} 0
+http_request_duration_microseconds{handler="targets",quantile="0.5"} NaN
+http_request_duration_microseconds{handler="targets",quantile="0.9"} NaN
+http_request_duration_microseconds{handler="targets",quantile="0.99"} NaN
+http_request_duration_microseconds_sum{handler="targets"} 0
+http_request_duration_microseconds_count{handler="targets"} 0
+http_request_duration_microseconds{handler="version",quantile="0.5"} NaN
+http_request_duration_microseconds{handler="version",quantile="0.9"} NaN
+http_request_duration_microseconds{handler="version",quantile="0.99"} NaN
+http_request_duration_microseconds_sum{handler="version"} 0
+http_request_duration_microseconds_count{handler="version"} 0
+# HELP http_request_size_bytes The HTTP request sizes in bytes.
+# TYPE http_request_size_bytes summary
+http_request_size_bytes{handler="alerts",quantile="0.5"} NaN
+http_request_size_bytes{handler="alerts",quantile="0.9"} NaN
+http_request_size_bytes{handler="alerts",quantile="0.99"} NaN
+http_request_size_bytes_sum{handler="alerts"} 0
+http_request_size_bytes_count{handler="alerts"} 0
+http_request_size_bytes{handler="config",quantile="0.5"} NaN
+http_request_size_bytes{handler="config",quantile="0.9"} NaN
+http_request_size_bytes{handler="config",quantile="0.99"} NaN
+http_request_size_bytes_sum{handler="config"} 0
+http_request_size_bytes_count{handler="config"} 0
+http_request_size_bytes{handler="consoles",quantile="0.5"} NaN
+http_request_size_bytes{handler="consoles",quantile="0.9"} NaN
+http_request_size_bytes{handler="consoles",quantile="0.99"} NaN
+http_request_size_bytes_sum{handler="consoles"} 0
+http_request_size_bytes_count{handler="consoles"} 0
+http_request_size_bytes{handler="drop_series",quantile="0.5"} NaN
+http_request_size_bytes{handler="drop_series",quantile="0.9"} NaN
+http_request_size_bytes{handler="drop_series",quantile="0.99"} NaN
+http_request_size_bytes_sum{handler="drop_series"} 0
+http_request_size_bytes_count{handler="drop_series"} 0
+http_request_size_bytes{handler="federate",quantile="0.5"} NaN
+http_request_size_bytes{handler="federate",quantile="0.9"} NaN
+http_request_size_bytes{handler="federate",quantile="0.99"} NaN
+http_request_size_bytes_sum{handler="federate"} 0
+http_request_size_bytes_count{handler="federate"} 0
+http_request_size_bytes{handler="flags",quantile="0.5"} NaN
+http_request_size_bytes{handler="flags",quantile="0.9"} NaN
+http_request_size_bytes{handler="flags",quantile="0.99"} NaN
+http_request_size_bytes_sum{handler="flags"} 0
+http_request_size_bytes_count{handler="flags"} 0
+http_request_size_bytes{handler="graph",quantile="0.5"} 367
+http_request_size_bytes{handler="graph",quantile="0.9"} 389
+http_request_size_bytes{handler="graph",quantile="0.99"} 389
+http_request_size_bytes_sum{handler="graph"} 1145
+http_request_size_bytes_count{handler="graph"} 3
+http_request_size_bytes{handler="heap",quantile="0.5"} NaN
+http_request_size_bytes{handler="heap",quantile="0.9"} NaN
+http_request_size_bytes{handler="heap",quantile="0.99"} NaN
+http_request_size_bytes_sum{handler="heap"} 0
+http_request_size_bytes_count{handler="heap"} 0
+http_request_size_bytes{handler="label_values",quantile="0.5"} 416
+http_request_size_bytes{handler="label_values",quantile="0.9"} 416
+http_request_size_bytes{handler="label_values",quantile="0.99"} 416
+http_request_size_bytes_sum{handler="label_values"} 1248
+http_request_size_bytes_count{handler="label_values"} 3
+http_request_size_bytes{handler="options",quantile="0.5"} NaN
+http_request_size_bytes{handler="options",quantile="0.9"} NaN
+http_request_size_bytes{handler="options",quantile="0.99"} NaN
+http_request_size_bytes_sum{handler="options"} 0
+http_request_size_bytes_count{handler="options"} 0
+http_request_size_bytes{handler="prometheus",quantile="0.5"} 238
+http_request_size_bytes{handler="prometheus",quantile="0.9"} 238
+http_request_size_bytes{handler="prometheus",quantile="0.99"} 238
+http_request_size_bytes_sum{handler="prometheus"} 109956
+http_request_size_bytes_count{handler="prometheus"} 462
+http_request_size_bytes{handler="query",quantile="0.5"} 531
+http_request_size_bytes{handler="query",quantile="0.9"} 531
+http_request_size_bytes{handler="query",quantile="0.99"} 531
+http_request_size_bytes_sum{handler="query"} 3186
+http_request_size_bytes_count{handler="query"} 6
+http_request_size_bytes{handler="query_range",quantile="0.5"} NaN
+http_request_size_bytes{handler="query_range",quantile="0.9"} NaN
+http_request_size_bytes{handler="query_range",quantile="0.99"} NaN
+http_request_size_bytes_sum{handler="query_range"} 0
+http_request_size_bytes_count{handler="query_range"} 0
+http_request_size_bytes{handler="rules",quantile="0.5"} NaN
+http_request_size_bytes{handler="rules",quantile="0.9"} NaN
+http_request_size_bytes{handler="rules",quantile="0.99"} NaN
+http_request_size_bytes_sum{handler="rules"} 0
+http_request_size_bytes_count{handler="rules"} 0
+http_request_size_bytes{handler="series",quantile="0.5"} NaN
+http_request_size_bytes{handler="series",quantile="0.9"} NaN
+http_request_size_bytes{handler="series",quantile="0.99"} NaN
+http_request_size_bytes_sum{handler="series"} 0
+http_request_size_bytes_count{handler="series"} 0
+http_request_size_bytes{handler="static",quantile="0.5"} 379
+http_request_size_bytes{handler="static",quantile="0.9"} 379
+http_request_size_bytes{handler="static",quantile="0.99"} 379
+http_request_size_bytes_sum{handler="static"} 1137
+http_request_size_bytes_count{handler="static"} 3
+http_request_size_bytes{handler="status",quantile="0.5"} NaN
+http_request_size_bytes{handler="status",quantile="0.9"} NaN
+http_request_size_bytes{handler="status",quantile="0.99"} NaN
+http_request_size_bytes_sum{handler="status"} 0
+http_request_size_bytes_count{handler="status"} 0
+http_request_size_bytes{handler="targets",quantile="0.5"} NaN
+http_request_size_bytes{handler="targets",quantile="0.9"} NaN
+http_request_size_bytes{handler="targets",quantile="0.99"} NaN
+http_request_size_bytes_sum{handler="targets"} 0
+http_request_size_bytes_count{handler="targets"} 0
+http_request_size_bytes{handler="version",quantile="0.5"} NaN
+http_request_size_bytes{handler="version",quantile="0.9"} NaN
+http_request_size_bytes{handler="version",quantile="0.99"} NaN
+http_request_size_bytes_sum{handler="version"} 0
+http_request_size_bytes_count{handler="version"} 0
+# HELP http_requests_total Total number of HTTP requests made.
+# TYPE http_requests_total counter
+http_requests_total{code="200",handler="graph",method="get"} 3
+http_requests_total{code="200",handler="label_values",method="get"} 3
+http_requests_total{code="200",handler="prometheus",method="get"} 462
+http_requests_total{code="200",handler="query",method="get"} 6
+http_requests_total{code="200",handler="static",method="get"} 3
+# HELP http_response_size_bytes The HTTP response sizes in bytes.
+# TYPE http_response_size_bytes summary
+http_response_size_bytes{handler="alerts",quantile="0.5"} NaN
+http_response_size_bytes{handler="alerts",quantile="0.9"} NaN
+http_response_size_bytes{handler="alerts",quantile="0.99"} NaN
+http_response_size_bytes_sum{handler="alerts"} 0
+http_response_size_bytes_count{handler="alerts"} 0
+http_response_size_bytes{handler="config",quantile="0.5"} NaN
+http_response_size_bytes{handler="config",quantile="0.9"} NaN
+http_response_size_bytes{handler="config",quantile="0.99"} NaN
+http_response_size_bytes_sum{handler="config"} 0
+http_response_size_bytes_count{handler="config"} 0
+http_response_size_bytes{handler="consoles",quantile="0.5"} NaN
+http_response_size_bytes{handler="consoles",quantile="0.9"} NaN
+http_response_size_bytes{handler="consoles",quantile="0.99"} NaN
+http_response_size_bytes_sum{handler="consoles"} 0
+http_response_size_bytes_count{handler="consoles"} 0
+http_response_size_bytes{handler="drop_series",quantile="0.5"} NaN
+http_response_size_bytes{handler="drop_series",quantile="0.9"} NaN
+http_response_size_bytes{handler="drop_series",quantile="0.99"} NaN
+http_response_size_bytes_sum{handler="drop_series"} 0
+http_response_size_bytes_count{handler="drop_series"} 0
+http_response_size_bytes{handler="federate",quantile="0.5"} NaN
+http_response_size_bytes{handler="federate",quantile="0.9"} NaN
+http_response_size_bytes{handler="federate",quantile="0.99"} NaN
+http_response_size_bytes_sum{handler="federate"} 0
+http_response_size_bytes_count{handler="federate"} 0
+http_response_size_bytes{handler="flags",quantile="0.5"} NaN
+http_response_size_bytes{handler="flags",quantile="0.9"} NaN
+http_response_size_bytes{handler="flags",quantile="0.99"} NaN
+http_response_size_bytes_sum{handler="flags"} 0
+http_response_size_bytes_count{handler="flags"} 0
+http_response_size_bytes{handler="graph",quantile="0.5"} 3619
+http_response_size_bytes{handler="graph",quantile="0.9"} 3619
+http_response_size_bytes{handler="graph",quantile="0.99"} 3619
+http_response_size_bytes_sum{handler="graph"} 10857
+http_response_size_bytes_count{handler="graph"} 3
+http_response_size_bytes{handler="heap",quantile="0.5"} NaN
+http_response_size_bytes{handler="heap",quantile="0.9"} NaN
+http_response_size_bytes{handler="heap",quantile="0.99"} NaN
+http_response_size_bytes_sum{handler="heap"} 0
+http_response_size_bytes_count{handler="heap"} 0
+http_response_size_bytes{handler="label_values",quantile="0.5"} 642
+http_response_size_bytes{handler="label_values",quantile="0.9"} 642
+http_response_size_bytes{handler="label_values",quantile="0.99"} 642
+http_response_size_bytes_sum{handler="label_values"} 1926
+http_response_size_bytes_count{handler="label_values"} 3
+http_response_size_bytes{handler="options",quantile="0.5"} NaN
+http_response_size_bytes{handler="options",quantile="0.9"} NaN
+http_response_size_bytes{handler="options",quantile="0.99"} NaN
+http_response_size_bytes_sum{handler="options"} 0
+http_response_size_bytes_count{handler="options"} 0
+http_response_size_bytes{handler="prometheus",quantile="0.5"} 3033
+http_response_size_bytes{handler="prometheus",quantile="0.9"} 3123
+http_response_size_bytes{handler="prometheus",quantile="0.99"} 3128
+http_response_size_bytes_sum{handler="prometheus"} 1.374097e+06
+http_response_size_bytes_count{handler="prometheus"} 462
+http_response_size_bytes{handler="query",quantile="0.5"} 776
+http_response_size_bytes{handler="query",quantile="0.9"} 781
+http_response_size_bytes{handler="query",quantile="0.99"} 781
+http_response_size_bytes_sum{handler="query"} 4656
+http_response_size_bytes_count{handler="query"} 6
+http_response_size_bytes{handler="query_range",quantile="0.5"} NaN
+http_response_size_bytes{handler="query_range",quantile="0.9"} NaN
+http_response_size_bytes{handler="query_range",quantile="0.99"} NaN
+http_response_size_bytes_sum{handler="query_range"} 0
+http_response_size_bytes_count{handler="query_range"} 0
+http_response_size_bytes{handler="rules",quantile="0.5"} NaN
+http_response_size_bytes{handler="rules",quantile="0.9"} NaN
+http_response_size_bytes{handler="rules",quantile="0.99"} NaN
+http_response_size_bytes_sum{handler="rules"} 0
+http_response_size_bytes_count{handler="rules"} 0
+http_response_size_bytes{handler="series",quantile="0.5"} NaN
+http_response_size_bytes{handler="series",quantile="0.9"} NaN
+http_response_size_bytes{handler="series",quantile="0.99"} NaN
+http_response_size_bytes_sum{handler="series"} 0
+http_response_size_bytes_count{handler="series"} 0
+http_response_size_bytes{handler="static",quantile="0.5"} 6316
+http_response_size_bytes{handler="static",quantile="0.9"} 6316
+http_response_size_bytes{handler="static",quantile="0.99"} 6316
+http_response_size_bytes_sum{handler="static"} 18948
+http_response_size_bytes_count{handler="static"} 3
+http_response_size_bytes{handler="status",quantile="0.5"} NaN
+http_response_size_bytes{handler="status",quantile="0.9"} NaN
+http_response_size_bytes{handler="status",quantile="0.99"} NaN
+http_response_size_bytes_sum{handler="status"} 0
+http_response_size_bytes_count{handler="status"} 0
+http_response_size_bytes{handler="targets",quantile="0.5"} NaN
+http_response_size_bytes{handler="targets",quantile="0.9"} NaN
+http_response_size_bytes{handler="targets",quantile="0.99"} NaN
+http_response_size_bytes_sum{handler="targets"} 0
+http_response_size_bytes_count{handler="targets"} 0
+http_response_size_bytes{handler="version",quantile="0.5"} NaN
+http_response_size_bytes{handler="version",quantile="0.9"} NaN
+http_response_size_bytes{handler="version",quantile="0.99"} NaN
+http_response_size_bytes_sum{handler="version"} 0
+http_response_size_bytes_count{handler="version"} 0
+# HELP prometheus_build_info A metric with a constant '1' value labeled by version, revision, branch, and goversion from which prometheus was built.
+# TYPE prometheus_build_info gauge
+prometheus_build_info{branch="",goversion="go1.7.3",revision="",version=""} 1
+# HELP prometheus_config_last_reload_success_timestamp_seconds Timestamp of the last successful configuration reload.
+# TYPE prometheus_config_last_reload_success_timestamp_seconds gauge
+prometheus_config_last_reload_success_timestamp_seconds 1.484395547e+09
+# HELP prometheus_config_last_reload_successful Whether the last configuration reload attempt was successful.
+# TYPE prometheus_config_last_reload_successful gauge
+prometheus_config_last_reload_successful 1
+# HELP prometheus_evaluator_duration_seconds The duration of rule group evaluations.
+# TYPE prometheus_evaluator_duration_seconds summary
+prometheus_evaluator_duration_seconds{quantile="0.01"} 1.7890000000000002e-06
+prometheus_evaluator_duration_seconds{quantile="0.05"} 1.7890000000000002e-06
+prometheus_evaluator_duration_seconds{quantile="0.5"} 1.7890000000000002e-06
+prometheus_evaluator_duration_seconds{quantile="0.9"} 1.7890000000000002e-06
+prometheus_evaluator_duration_seconds{quantile="0.99"} 1.7890000000000002e-06
+prometheus_evaluator_duration_seconds_sum 1.7890000000000002e-06
+prometheus_evaluator_duration_seconds_count 1
+# HELP prometheus_evaluator_iterations_skipped_total The total number of rule group evaluations skipped due to throttled metric storage.
+# TYPE prometheus_evaluator_iterations_skipped_total counter
+prometheus_evaluator_iterations_skipped_total 0
+# HELP prometheus_notifications_dropped_total Total number of alerts dropped due to alert manager missing in configuration.
+# TYPE prometheus_notifications_dropped_total counter
+prometheus_notifications_dropped_total 0
+# HELP prometheus_notifications_queue_capacity The capacity of the alert notifications queue.
+# TYPE prometheus_notifications_queue_capacity gauge
+prometheus_notifications_queue_capacity 10000
+# HELP prometheus_notifications_queue_length The number of alert notifications in the queue.
+# TYPE prometheus_notifications_queue_length gauge
+prometheus_notifications_queue_length 0
+# HELP prometheus_rule_evaluation_failures_total The total number of rule evaluation failures.
+# TYPE prometheus_rule_evaluation_failures_total counter
+prometheus_rule_evaluation_failures_total{rule_type="alerting"} 0
+prometheus_rule_evaluation_failures_total{rule_type="recording"} 0
+# HELP prometheus_sd_azure_refresh_duration_seconds The duration of a Azure-SD refresh in seconds.
+# TYPE prometheus_sd_azure_refresh_duration_seconds summary
+prometheus_sd_azure_refresh_duration_seconds{quantile="0.5"} NaN
+prometheus_sd_azure_refresh_duration_seconds{quantile="0.9"} NaN
+prometheus_sd_azure_refresh_duration_seconds{quantile="0.99"} NaN
+prometheus_sd_azure_refresh_duration_seconds_sum 0
+prometheus_sd_azure_refresh_duration_seconds_count 0
+# HELP prometheus_sd_azure_refresh_failures_total Number of Azure-SD refresh failures.
+# TYPE prometheus_sd_azure_refresh_failures_total counter
+prometheus_sd_azure_refresh_failures_total 0
+# HELP prometheus_sd_consul_rpc_duration_seconds The duration of a Consul RPC call in seconds.
+# TYPE prometheus_sd_consul_rpc_duration_seconds summary
+prometheus_sd_consul_rpc_duration_seconds{call="service",endpoint="catalog",quantile="0.5"} NaN
+prometheus_sd_consul_rpc_duration_seconds{call="service",endpoint="catalog",quantile="0.9"} NaN
+prometheus_sd_consul_rpc_duration_seconds{call="service",endpoint="catalog",quantile="0.99"} NaN
+prometheus_sd_consul_rpc_duration_seconds_sum{call="service",endpoint="catalog"} 0
+prometheus_sd_consul_rpc_duration_seconds_count{call="service",endpoint="catalog"} 0
+prometheus_sd_consul_rpc_duration_seconds{call="services",endpoint="catalog",quantile="0.5"} NaN
+prometheus_sd_consul_rpc_duration_seconds{call="services",endpoint="catalog",quantile="0.9"} NaN
+prometheus_sd_consul_rpc_duration_seconds{call="services",endpoint="catalog",quantile="0.99"} NaN
+prometheus_sd_consul_rpc_duration_seconds_sum{call="services",endpoint="catalog"} 0
+prometheus_sd_consul_rpc_duration_seconds_count{call="services",endpoint="catalog"} 0
+# HELP prometheus_sd_consul_rpc_failures_total The number of Consul RPC call failures.
+# TYPE prometheus_sd_consul_rpc_failures_total counter
+prometheus_sd_consul_rpc_failures_total 0
+# HELP prometheus_sd_dns_lookup_failures_total The number of DNS-SD lookup failures.
+# TYPE prometheus_sd_dns_lookup_failures_total counter
+prometheus_sd_dns_lookup_failures_total 0
+# HELP prometheus_sd_dns_lookups_total The number of DNS-SD lookups.
+# TYPE prometheus_sd_dns_lookups_total counter
+prometheus_sd_dns_lookups_total 0
+# HELP prometheus_sd_ec2_refresh_duration_seconds The duration of a EC2-SD refresh in seconds.
+# TYPE prometheus_sd_ec2_refresh_duration_seconds summary
+prometheus_sd_ec2_refresh_duration_seconds{quantile="0.5"} NaN
+prometheus_sd_ec2_refresh_duration_seconds{quantile="0.9"} NaN
+prometheus_sd_ec2_refresh_duration_seconds{quantile="0.99"} NaN
+prometheus_sd_ec2_refresh_duration_seconds_sum 0
+prometheus_sd_ec2_refresh_duration_seconds_count 0
+# HELP prometheus_sd_ec2_refresh_failures_total The number of EC2-SD scrape failures.
+# TYPE prometheus_sd_ec2_refresh_failures_total counter
+prometheus_sd_ec2_refresh_failures_total 0
+# HELP prometheus_sd_file_read_errors_total The number of File-SD read errors.
+# TYPE prometheus_sd_file_read_errors_total counter
+prometheus_sd_file_read_errors_total 0
+# HELP prometheus_sd_file_scan_duration_seconds The duration of the File-SD scan in seconds.
+# TYPE prometheus_sd_file_scan_duration_seconds summary
+prometheus_sd_file_scan_duration_seconds{quantile="0.5"} NaN
+prometheus_sd_file_scan_duration_seconds{quantile="0.9"} NaN
+prometheus_sd_file_scan_duration_seconds{quantile="0.99"} NaN
+prometheus_sd_file_scan_duration_seconds_sum 0
+prometheus_sd_file_scan_duration_seconds_count 0
+# HELP prometheus_sd_gce_refresh_duration The duration of a GCE-SD refresh in seconds.
+# TYPE prometheus_sd_gce_refresh_duration summary
+prometheus_sd_gce_refresh_duration{quantile="0.5"} NaN
+prometheus_sd_gce_refresh_duration{quantile="0.9"} NaN
+prometheus_sd_gce_refresh_duration{quantile="0.99"} NaN
+prometheus_sd_gce_refresh_duration_sum 0
+prometheus_sd_gce_refresh_duration_count 0
+# HELP prometheus_sd_gce_refresh_failures_total The number of GCE-SD refresh failures.
+# TYPE prometheus_sd_gce_refresh_failures_total counter
+prometheus_sd_gce_refresh_failures_total 0
+# HELP prometheus_sd_kubernetes_events_total The number of Kubernetes events handled.
+# TYPE prometheus_sd_kubernetes_events_total counter
+prometheus_sd_kubernetes_events_total{event="add",role="endpoints"} 0
+prometheus_sd_kubernetes_events_total{event="add",role="node"} 0
+prometheus_sd_kubernetes_events_total{event="add",role="pod"} 0
+prometheus_sd_kubernetes_events_total{event="add",role="service"} 0
+prometheus_sd_kubernetes_events_total{event="delete",role="endpoints"} 0
+prometheus_sd_kubernetes_events_total{event="delete",role="node"} 0
+prometheus_sd_kubernetes_events_total{event="delete",role="pod"} 0
+prometheus_sd_kubernetes_events_total{event="delete",role="service"} 0
+prometheus_sd_kubernetes_events_total{event="update",role="endpoints"} 0
+prometheus_sd_kubernetes_events_total{event="update",role="node"} 0
+prometheus_sd_kubernetes_events_total{event="update",role="pod"} 0
+prometheus_sd_kubernetes_events_total{event="update",role="service"} 0
+# HELP prometheus_sd_marathon_refresh_duration_seconds The duration of a Marathon-SD refresh in seconds.
+# TYPE prometheus_sd_marathon_refresh_duration_seconds summary
+prometheus_sd_marathon_refresh_duration_seconds{quantile="0.5"} NaN
+prometheus_sd_marathon_refresh_duration_seconds{quantile="0.9"} NaN
+prometheus_sd_marathon_refresh_duration_seconds{quantile="0.99"} NaN
+prometheus_sd_marathon_refresh_duration_seconds_sum 0
+prometheus_sd_marathon_refresh_duration_seconds_count 0
+# HELP prometheus_sd_marathon_refresh_failures_total The number of Marathon-SD refresh failures.
+# TYPE prometheus_sd_marathon_refresh_failures_total counter
+prometheus_sd_marathon_refresh_failures_total 0
+# HELP prometheus_target_interval_length_seconds Actual intervals between scrapes.
+# TYPE prometheus_target_interval_length_seconds summary
+prometheus_target_interval_length_seconds{interval="50ms",quantile="0.01"} 0.046182157
+prometheus_target_interval_length_seconds{interval="50ms",quantile="0.05"} 0.047306979000000006
+prometheus_target_interval_length_seconds{interval="50ms",quantile="0.5"} 0.050381782
+prometheus_target_interval_length_seconds{interval="50ms",quantile="0.9"} 0.052614556
+prometheus_target_interval_length_seconds{interval="50ms",quantile="0.99"} 0.054404386000000006
+prometheus_target_interval_length_seconds_sum{interval="50ms"} 34.512091221999995
+prometheus_target_interval_length_seconds_count{interval="50ms"} 685
+# HELP prometheus_target_scrape_pool_sync_total Total number of syncs that were executed on a scrape pool.
+# TYPE prometheus_target_scrape_pool_sync_total counter
+prometheus_target_scrape_pool_sync_total{scrape_job="prometheus"} 1
+# HELP prometheus_target_skipped_scrapes_total Total number of scrapes that were skipped because the metric storage was throttled.
+# TYPE prometheus_target_skipped_scrapes_total counter
+prometheus_target_skipped_scrapes_total 0
+# HELP prometheus_target_sync_length_seconds Actual interval to sync the scrape pool.
+# TYPE prometheus_target_sync_length_seconds summary
+prometheus_target_sync_length_seconds{scrape_job="prometheus",quantile="0.01"} 0.00020043300000000002
+prometheus_target_sync_length_seconds{scrape_job="prometheus",quantile="0.05"} 0.00020043300000000002
+prometheus_target_sync_length_seconds{scrape_job="prometheus",quantile="0.5"} 0.00020043300000000002
+prometheus_target_sync_length_seconds{scrape_job="prometheus",quantile="0.9"} 0.00020043300000000002
+prometheus_target_sync_length_seconds{scrape_job="prometheus",quantile="0.99"} 0.00020043300000000002
+prometheus_target_sync_length_seconds_sum{scrape_job="prometheus"} 0.00020043300000000002
+prometheus_target_sync_length_seconds_count{scrape_job="prometheus"} 1
+# HELP prometheus_treecache_watcher_goroutines The current number of watcher goroutines.
+# TYPE prometheus_treecache_watcher_goroutines gauge
+prometheus_treecache_watcher_goroutines 0
+# HELP prometheus_treecache_zookeeper_failures_total The total number of ZooKeeper failures.
+# TYPE prometheus_treecache_zookeeper_failures_total counter
+prometheus_treecache_zookeeper_failures_total 0 \ No newline at end of file
diff --git a/src/go/collectors/go.d.plugin/pkg/socket/client.go b/src/go/collectors/go.d.plugin/pkg/socket/client.go
new file mode 100644
index 000000000..26ae1dfa6
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/socket/client.go
@@ -0,0 +1,106 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package socket
+
+import (
+ "bufio"
+ "crypto/tls"
+ "errors"
+ "net"
+ "time"
+)
+
+// New returns a new pointer to a socket client given the socket
+// type (IP, TCP, UDP, UNIX), a network address (IP/domain:port),
+// a timeout and a TLS config. It supports both IPv4 and IPv6 address
+// and reuses connection where possible.
+func New(config Config) *Socket {
+ return &Socket{
+ Config: config,
+ conn: nil,
+ }
+}
+
+// Socket is the implementation of a socket client.
+type Socket struct {
+ Config
+ conn net.Conn
+}
+
+// Connect connects to the Socket address on the named network.
+// If the address is a domain name it will also perform the DNS resolution.
+// Address like :80 will attempt to connect to the localhost.
+// The config timeout and TLS config will be used.
+func (s *Socket) Connect() error {
+ network, address := networkType(s.Address)
+ var conn net.Conn
+ var err error
+
+ if s.TLSConf == nil {
+ conn, err = net.DialTimeout(network, address, s.ConnectTimeout)
+ } else {
+ var d net.Dialer
+ d.Timeout = s.ConnectTimeout
+ conn, err = tls.DialWithDialer(&d, network, address, s.TLSConf)
+ }
+ if err != nil {
+ return err
+ }
+
+ s.conn = conn
+
+ return nil
+}
+
+// Disconnect closes the connection.
+// Any in-flight commands will be cancelled and return errors.
+func (s *Socket) Disconnect() (err error) {
+ if s.conn != nil {
+ err = s.conn.Close()
+ s.conn = nil
+ }
+ return err
+}
+
+// Command writes the command string to the connection and passed the
+// response bytes line by line to the process function. It uses the
+// timeout value from the Socket config and returns read, write and
+// timeout errors if any. If a timeout occurs during the processing
+// of the responses this function will stop processing and return a
+// timeout error.
+func (s *Socket) Command(command string, process Processor) error {
+ if s.conn == nil {
+ return errors.New("cannot send command on nil connection")
+ }
+ if err := write(command, s.conn, s.WriteTimeout); err != nil {
+ return err
+ }
+ return read(s.conn, process, s.ReadTimeout)
+}
+
+func write(command string, writer net.Conn, timeout time.Duration) error {
+ if writer == nil {
+ return errors.New("attempt to write on nil connection")
+ }
+ if err := writer.SetWriteDeadline(time.Now().Add(timeout)); err != nil {
+ return err
+ }
+ _, err := writer.Write([]byte(command))
+ return err
+}
+
+func read(reader net.Conn, process Processor, timeout time.Duration) error {
+ if process == nil {
+ return errors.New("process func is nil")
+ }
+ if reader == nil {
+ return errors.New("attempt to read on nil connection")
+ }
+ if err := reader.SetReadDeadline(time.Now().Add(timeout)); err != nil {
+ return err
+ }
+ scanner := bufio.NewScanner(reader)
+ for scanner.Scan() && process(scanner.Bytes()) {
+ }
+ return scanner.Err()
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/socket/client_test.go b/src/go/collectors/go.d.plugin/pkg/socket/client_test.go
new file mode 100644
index 000000000..fa64f4558
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/socket/client_test.go
@@ -0,0 +1,163 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package socket
+
+import (
+ "crypto/tls"
+ "testing"
+ "time"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+const (
+ testServerAddress = "127.0.0.1:9999"
+ testUdpServerAddress = "udp://127.0.0.1:9999"
+ testUnixServerAddress = "/tmp/testSocketFD"
+ defaultTimeout = 100 * time.Millisecond
+)
+
+var tcpConfig = Config{
+ Address: testServerAddress,
+ ConnectTimeout: defaultTimeout,
+ ReadTimeout: defaultTimeout,
+ WriteTimeout: defaultTimeout,
+ TLSConf: nil,
+}
+
+var udpConfig = Config{
+ Address: testUdpServerAddress,
+ ConnectTimeout: defaultTimeout,
+ ReadTimeout: defaultTimeout,
+ WriteTimeout: defaultTimeout,
+ TLSConf: nil,
+}
+
+var unixConfig = Config{
+ Address: testUnixServerAddress,
+ ConnectTimeout: defaultTimeout,
+ ReadTimeout: defaultTimeout,
+ WriteTimeout: defaultTimeout,
+ TLSConf: nil,
+}
+
+var tcpTlsConfig = Config{
+ Address: testServerAddress,
+ ConnectTimeout: defaultTimeout,
+ ReadTimeout: defaultTimeout,
+ WriteTimeout: defaultTimeout,
+ TLSConf: &tls.Config{},
+}
+
+func Test_clientCommand(t *testing.T) {
+ srv := &tcpServer{addr: testServerAddress, rowsNumResp: 1}
+ go func() { _ = srv.Run(); defer func() { _ = srv.Close() }() }()
+
+ time.Sleep(time.Millisecond * 100)
+ sock := New(tcpConfig)
+ require.NoError(t, sock.Connect())
+ err := sock.Command("ping\n", func(bytes []byte) bool {
+ assert.Equal(t, "pong", string(bytes))
+ return true
+ })
+ require.NoError(t, sock.Disconnect())
+ require.NoError(t, err)
+}
+
+func Test_clientTimeout(t *testing.T) {
+ srv := &tcpServer{addr: testServerAddress, rowsNumResp: 1}
+ go func() { _ = srv.Run() }()
+
+ time.Sleep(time.Millisecond * 100)
+ sock := New(tcpConfig)
+ require.NoError(t, sock.Connect())
+ sock.ReadTimeout = 0
+ sock.ReadTimeout = 0
+ err := sock.Command("ping\n", func(bytes []byte) bool {
+ assert.Equal(t, "pong", string(bytes))
+ return true
+ })
+ require.Error(t, err)
+}
+
+func Test_clientIncompleteSSL(t *testing.T) {
+ srv := &tcpServer{addr: testServerAddress, rowsNumResp: 1}
+ go func() { _ = srv.Run() }()
+
+ time.Sleep(time.Millisecond * 100)
+ sock := New(tcpTlsConfig)
+ err := sock.Connect()
+ require.Error(t, err)
+}
+
+func Test_clientCommandStopProcessing(t *testing.T) {
+ srv := &tcpServer{addr: testServerAddress, rowsNumResp: 2}
+ go func() { _ = srv.Run() }()
+
+ time.Sleep(time.Millisecond * 100)
+ sock := New(tcpConfig)
+ require.NoError(t, sock.Connect())
+ err := sock.Command("ping\n", func(bytes []byte) bool {
+ assert.Equal(t, "pong", string(bytes))
+ return false
+ })
+ require.NoError(t, sock.Disconnect())
+ require.NoError(t, err)
+}
+
+func Test_clientUDPCommand(t *testing.T) {
+ srv := &udpServer{addr: testServerAddress, rowsNumResp: 1}
+ go func() { _ = srv.Run(); defer func() { _ = srv.Close() }() }()
+
+ time.Sleep(time.Millisecond * 100)
+ sock := New(udpConfig)
+ require.NoError(t, sock.Connect())
+ err := sock.Command("ping\n", func(bytes []byte) bool {
+ assert.Equal(t, "pong", string(bytes))
+ return false
+ })
+ require.NoError(t, sock.Disconnect())
+ require.NoError(t, err)
+}
+
+func Test_clientTCPAddress(t *testing.T) {
+ srv := &tcpServer{addr: testServerAddress, rowsNumResp: 1}
+ go func() { _ = srv.Run() }()
+ time.Sleep(time.Millisecond * 100)
+
+ sock := New(tcpConfig)
+ require.NoError(t, sock.Connect())
+
+ tcpConfig.Address = "tcp://" + tcpConfig.Address
+ sock = New(tcpConfig)
+ require.NoError(t, sock.Connect())
+}
+
+func Test_clientUnixCommand(t *testing.T) {
+ srv := &unixServer{addr: testUnixServerAddress, rowsNumResp: 1}
+ // cleanup previous file descriptors
+ _ = srv.Close()
+ go func() { _ = srv.Run() }()
+
+ time.Sleep(time.Millisecond * 200)
+ sock := New(unixConfig)
+ require.NoError(t, sock.Connect())
+ err := sock.Command("ping\n", func(bytes []byte) bool {
+ assert.Equal(t, "pong", string(bytes))
+ return false
+ })
+ require.NoError(t, err)
+ require.NoError(t, sock.Disconnect())
+}
+
+func Test_clientEmptyProcessFunc(t *testing.T) {
+ srv := &tcpServer{addr: testServerAddress, rowsNumResp: 1}
+ go func() { _ = srv.Run() }()
+
+ time.Sleep(time.Millisecond * 100)
+ sock := New(tcpConfig)
+ require.NoError(t, sock.Connect())
+ err := sock.Command("ping\n", nil)
+ require.Error(t, err, "nil process func should return an error")
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/socket/servers_test.go b/src/go/collectors/go.d.plugin/pkg/socket/servers_test.go
new file mode 100644
index 000000000..d66178162
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/socket/servers_test.go
@@ -0,0 +1,139 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package socket
+
+import (
+ "bufio"
+ "errors"
+ "fmt"
+ "net"
+ "os"
+ "strings"
+ "time"
+)
+
+type tcpServer struct {
+ addr string
+ server net.Listener
+ rowsNumResp int
+}
+
+func (t *tcpServer) Run() (err error) {
+ t.server, err = net.Listen("tcp", t.addr)
+ if err != nil {
+ return
+ }
+ return t.handleConnections()
+}
+
+func (t *tcpServer) Close() (err error) {
+ return t.server.Close()
+}
+
+func (t *tcpServer) handleConnections() (err error) {
+ for {
+ conn, err := t.server.Accept()
+ if err != nil || conn == nil {
+ return errors.New("could not accept connection")
+ }
+ t.handleConnection(conn)
+ }
+}
+
+func (t *tcpServer) handleConnection(conn net.Conn) {
+ defer func() { _ = conn.Close() }()
+ _ = conn.SetDeadline(time.Now().Add(time.Millisecond * 100))
+
+ rw := bufio.NewReadWriter(bufio.NewReader(conn), bufio.NewWriter(conn))
+ _, err := rw.ReadString('\n')
+ if err != nil {
+ _, _ = rw.WriteString("failed to read input")
+ _ = rw.Flush()
+ } else {
+ resp := strings.Repeat("pong\n", t.rowsNumResp)
+ _, _ = rw.WriteString(resp)
+ _ = rw.Flush()
+ }
+}
+
+type udpServer struct {
+ addr string
+ conn *net.UDPConn
+ rowsNumResp int
+}
+
+func (u *udpServer) Run() (err error) {
+ addr, err := net.ResolveUDPAddr("udp", u.addr)
+ if err != nil {
+ return err
+ }
+ u.conn, err = net.ListenUDP("udp", addr)
+ if err != nil {
+ return
+ }
+ u.handleConnections()
+ return nil
+}
+
+func (u *udpServer) Close() (err error) {
+ return u.conn.Close()
+}
+
+func (u *udpServer) handleConnections() {
+ for {
+ var buf [2048]byte
+ _, addr, _ := u.conn.ReadFromUDP(buf[0:])
+ resp := strings.Repeat("pong\n", u.rowsNumResp)
+ _, _ = u.conn.WriteToUDP([]byte(resp), addr)
+ }
+}
+
+type unixServer struct {
+ addr string
+ conn *net.UnixListener
+ rowsNumResp int
+}
+
+func (u *unixServer) Run() (err error) {
+ _, _ = os.CreateTemp("/tmp", "testSocketFD")
+ addr, err := net.ResolveUnixAddr("unix", u.addr)
+ if err != nil {
+ return err
+ }
+ u.conn, err = net.ListenUnix("unix", addr)
+ if err != nil {
+ return
+ }
+ go u.handleConnections()
+ return nil
+}
+
+func (u *unixServer) Close() (err error) {
+ _ = os.Remove(testUnixServerAddress)
+ return u.conn.Close()
+}
+
+func (u *unixServer) handleConnections() {
+ var conn net.Conn
+ var err error
+ conn, err = u.conn.AcceptUnix()
+ if err != nil {
+ panic(fmt.Errorf("could not accept connection: %v", err))
+ }
+ u.handleConnection(conn)
+}
+
+func (u *unixServer) handleConnection(conn net.Conn) {
+ _ = conn.SetDeadline(time.Now().Add(time.Second))
+
+ rw := bufio.NewReadWriter(bufio.NewReader(conn), bufio.NewWriter(conn))
+ _, err := rw.ReadString('\n')
+ if err != nil {
+ _, _ = rw.WriteString("failed to read input")
+ _ = rw.Flush()
+ } else {
+ resp := strings.Repeat("pong\n", u.rowsNumResp)
+ _, _ = rw.WriteString(resp)
+ _ = rw.Flush()
+ }
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/socket/types.go b/src/go/collectors/go.d.plugin/pkg/socket/types.go
new file mode 100644
index 000000000..693faf5be
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/socket/types.go
@@ -0,0 +1,41 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package socket
+
+import (
+ "crypto/tls"
+ "time"
+)
+
+// Processor function passed to the Socket.Command function.
+// It is passed by the caller to process a command's response
+// line by line.
+type Processor func([]byte) bool
+
+// Client is the interface that wraps the basic socket client operations
+// and hides the implementation details from the users.
+//
+// Connect should prepare the connection.
+//
+// Disconnect should stop any in-flight connections.
+//
+// Command should send the actual data to the wire and pass
+// any results to the processor function.
+//
+// Implementations should return TCP, UDP or Unix ready sockets.
+type Client interface {
+ Connect() error
+ Disconnect() error
+ Command(command string, process Processor) error
+}
+
+// Config holds the network ip v4 or v6 address, port,
+// Socket type(ip, tcp, udp, unix), timeout and TLS configuration
+// for a Socket
+type Config struct {
+ Address string
+ ConnectTimeout time.Duration
+ ReadTimeout time.Duration
+ WriteTimeout time.Duration
+ TLSConf *tls.Config
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/socket/utils.go b/src/go/collectors/go.d.plugin/pkg/socket/utils.go
new file mode 100644
index 000000000..dcc48b383
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/socket/utils.go
@@ -0,0 +1,25 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package socket
+
+import "strings"
+
+func IsUnixSocket(address string) bool {
+ return strings.HasPrefix(address, "/") || strings.HasPrefix(address, "unix://")
+}
+
+func IsUdpSocket(address string) bool {
+ return strings.HasPrefix(address, "udp://")
+}
+
+func networkType(address string) (string, string) {
+ switch {
+ case IsUnixSocket(address):
+ address = strings.TrimPrefix(address, "unix://")
+ return "unix", address
+ case IsUdpSocket(address):
+ return "udp", strings.TrimPrefix(address, "udp://")
+ default:
+ return "tcp", strings.TrimPrefix(address, "tcp://")
+ }
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/stm/stm.go b/src/go/collectors/go.d.plugin/pkg/stm/stm.go
new file mode 100644
index 000000000..7d07ba9a4
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/stm/stm.go
@@ -0,0 +1,172 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package stm
+
+import (
+ "fmt"
+ "log"
+ "reflect"
+ "strconv"
+ "strings"
+)
+
+const (
+ fieldTagName = "stm"
+ structKey = "STMKey"
+)
+
+type (
+ Value interface {
+ WriteTo(rv map[string]int64, key string, mul, div int)
+ }
+)
+
+// ToMap converts struct to a map[string]int64 based on 'stm' tags
+func ToMap(s ...interface{}) map[string]int64 {
+ rv := map[string]int64{}
+ for _, v := range s {
+ value := reflect.Indirect(reflect.ValueOf(v))
+ toMap(value, rv, "", 1, 1)
+ }
+ return rv
+}
+
+func toMap(value reflect.Value, rv map[string]int64, key string, mul, div int) {
+ if !value.IsValid() {
+ log.Panicf("value is not valid key=%s", key)
+ }
+ if value.CanInterface() {
+ val, ok := value.Interface().(Value)
+ if ok {
+ val.WriteTo(rv, key, mul, div)
+ return
+ }
+ }
+ switch value.Kind() {
+ case reflect.Ptr:
+ convertPtr(value, rv, key, mul, div)
+ case reflect.Struct:
+ convertStruct(value, rv, key)
+ case reflect.Array, reflect.Slice:
+ convertArraySlice(value, rv, key, mul, div)
+ case reflect.Map:
+ convertMap(value, rv, key, mul, div)
+ case reflect.Bool:
+ convertBool(value, rv, key)
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ convertInteger(value, rv, key, mul, div)
+ case reflect.Float32, reflect.Float64:
+ convertFloat(value, rv, key, mul, div)
+ case reflect.Interface:
+ convertInterface(value, rv, key, mul, div)
+ default:
+ log.Panicf("unsupported data type: %v", value.Kind())
+ }
+}
+
+func convertPtr(value reflect.Value, rv map[string]int64, key string, mul, div int) {
+ if !value.IsNil() {
+ toMap(value.Elem(), rv, key, mul, div)
+ }
+}
+
+func convertStruct(value reflect.Value, rv map[string]int64, key string) {
+ t := value.Type()
+ k := value.FieldByName(structKey)
+ if k.Kind() == reflect.String {
+ key = joinPrefix(key, k.String())
+ }
+ for i := 0; i < t.NumField(); i++ {
+ ft := t.Field(i)
+ tag, ok := ft.Tag.Lookup(fieldTagName)
+ if !ok || ft.Name == structKey {
+ continue
+ }
+ value := value.Field(i)
+ prefix, mul, div := parseTag(tag)
+ toMap(value, rv, joinPrefix(key, prefix), mul, div)
+ }
+}
+
+func convertMap(value reflect.Value, rv map[string]int64, key string, mul, div int) {
+ if value.IsNil() {
+ log.Panicf("value is nil key=%s", key)
+ }
+ for _, k := range value.MapKeys() {
+ toMap(value.MapIndex(k), rv, joinPrefix(key, k.String()), mul, div)
+ }
+}
+
+func convertArraySlice(value reflect.Value, rv map[string]int64, key string, mul, div int) {
+ for i := 0; i < value.Len(); i++ {
+ toMap(value.Index(i), rv, key, mul, div)
+ }
+}
+
+func convertBool(value reflect.Value, rv map[string]int64, key string) {
+ if _, ok := rv[key]; ok {
+ log.Panic("duplicate key: ", key)
+ }
+ if value.Bool() {
+ rv[key] = 1
+ } else {
+ rv[key] = 0
+ }
+}
+
+func convertInteger(value reflect.Value, rv map[string]int64, key string, mul, div int) {
+ if _, ok := rv[key]; ok {
+ log.Panic("duplicate key: ", key)
+ }
+ intVal := value.Int()
+ rv[key] = intVal * int64(mul) / int64(div)
+}
+
+func convertFloat(value reflect.Value, rv map[string]int64, key string, mul, div int) {
+ if _, ok := rv[key]; ok {
+ log.Panic("duplicate key: ", key)
+ }
+ floatVal := value.Float()
+ rv[key] = int64(floatVal * float64(mul) / float64(div))
+}
+
+func convertInterface(value reflect.Value, rv map[string]int64, key string, mul, div int) {
+ fv := reflect.ValueOf(value.Interface())
+ toMap(fv, rv, key, mul, div)
+}
+
+func joinPrefix(prefix, key string) string {
+ if prefix == "" {
+ return key
+ }
+ if key == "" {
+ return prefix
+ }
+ return prefix + "_" + key
+}
+
+func parseTag(tag string) (prefix string, mul int, div int) {
+ tokens := strings.Split(tag, ",")
+ mul = 1
+ div = 1
+ var err error
+ switch len(tokens) {
+ case 3:
+ div, err = strconv.Atoi(tokens[2])
+ if err != nil {
+ log.Panic(err)
+ }
+ fallthrough
+ case 2:
+ mul, err = strconv.Atoi(tokens[1])
+ if err != nil {
+ log.Panic(err)
+ }
+ fallthrough
+ case 1:
+ prefix = tokens[0]
+ default:
+ log.Panic(fmt.Errorf("invalid tag format: %s", tag))
+ }
+ return
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/stm/stm_test.go b/src/go/collectors/go.d.plugin/pkg/stm/stm_test.go
new file mode 100644
index 000000000..04d63b32d
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/stm/stm_test.go
@@ -0,0 +1,415 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package stm_test
+
+import (
+ "testing"
+
+ "github.com/netdata/netdata/go/go.d.plugin/pkg/stm"
+
+ "github.com/netdata/netdata/go/go.d.plugin/pkg/metrics"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestToMap_empty(t *testing.T) {
+ s := struct{}{}
+
+ expected := map[string]int64{}
+
+ assert.EqualValuesf(t, expected, stm.ToMap(s), "value test")
+ assert.EqualValuesf(t, expected, stm.ToMap(&s), "ptr test")
+}
+
+func TestToMap_metrics(t *testing.T) {
+ s := struct {
+ C metrics.Counter `stm:"c"`
+ G metrics.Gauge `stm:"g,100"`
+ H metrics.Histogram `stm:"h,100"`
+ S metrics.Summary `stm:"s,200,2"`
+ }{}
+ s.C.Inc()
+ s.G.Set(3.14)
+ s.H = metrics.NewHistogram([]float64{1, 5, 10})
+
+ s.H.Observe(3.14)
+ s.H.Observe(6.28)
+ s.H.Observe(20)
+
+ s.S = metrics.NewSummary()
+ s.S.Observe(3.14)
+ s.S.Observe(6.28)
+
+ expected := map[string]int64{
+ "c": 1,
+ "g": 314,
+
+ "h_count": 3,
+ "h_sum": 2942,
+ "h_bucket_1": 0,
+ "h_bucket_2": 1,
+ "h_bucket_3": 2,
+
+ "s_count": 2,
+ "s_sum": 942,
+ "s_min": 314,
+ "s_max": 628,
+ "s_avg": 471,
+ }
+
+ assert.Equal(t, expected, stm.ToMap(s), "value test")
+ assert.Equal(t, expected, stm.ToMap(&s), "ptr test")
+}
+
+func TestToMap_int(t *testing.T) {
+ s := struct {
+ I int `stm:"int"`
+ I8 int8 `stm:"int8"`
+ I16 int16 `stm:"int16"`
+ I32 int32 `stm:"int32"`
+ I64 int64 `stm:"int64"`
+ }{
+ I: 1, I8: 2, I16: 3, I32: 4, I64: 5,
+ }
+
+ expected := map[string]int64{
+ "int": 1, "int8": 2, "int16": 3, "int32": 4, "int64": 5,
+ }
+
+ assert.EqualValuesf(t, expected, stm.ToMap(s), "value test")
+ assert.EqualValuesf(t, expected, stm.ToMap(&s), "ptr test")
+}
+
+func TestToMap_float(t *testing.T) {
+ s := struct {
+ F32 float32 `stm:"f32,100"`
+ F64 float64 `stm:"f64"`
+ }{
+ 3.14, 628,
+ }
+
+ expected := map[string]int64{
+ "f32": 314, "f64": 628,
+ }
+
+ assert.EqualValuesf(t, expected, stm.ToMap(s), "value test")
+ assert.EqualValuesf(t, expected, stm.ToMap(&s), "ptr test")
+}
+
+func TestToMap_struct(t *testing.T) {
+ type pair struct {
+ Left int `stm:"left"`
+ Right int `stm:"right"`
+ }
+ s := struct {
+ I int `stm:"int"`
+ Pempty pair `stm:""`
+ Ps pair `stm:"s"`
+ Notag int
+ }{
+ I: 1,
+ Pempty: pair{2, 3},
+ Ps: pair{4, 5},
+ Notag: 6,
+ }
+
+ expected := map[string]int64{
+ "int": 1,
+ "left": 2, "right": 3,
+ "s_left": 4, "s_right": 5,
+ }
+
+ assert.EqualValuesf(t, expected, stm.ToMap(s), "value test")
+ assert.EqualValuesf(t, expected, stm.ToMap(&s), "ptr test")
+}
+
+func TestToMap_tree(t *testing.T) {
+ type node struct {
+ Value int `stm:"v"`
+ Left *node `stm:"left"`
+ Right *node `stm:"right"`
+ }
+ s := node{1,
+ &node{2, nil, nil},
+ &node{3,
+ &node{4, nil, nil},
+ nil,
+ },
+ }
+ expected := map[string]int64{
+ "v": 1,
+ "left_v": 2,
+ "right_v": 3,
+ "right_left_v": 4,
+ }
+
+ assert.EqualValuesf(t, expected, stm.ToMap(s), "value test")
+ assert.EqualValuesf(t, expected, stm.ToMap(&s), "ptr test")
+}
+
+func TestToMap_map(t *testing.T) {
+ s := struct {
+ I int `stm:"int"`
+ M map[string]int64 `stm:""`
+ }{
+ I: 1,
+ M: map[string]int64{
+ "a": 2,
+ "b": 3,
+ },
+ }
+
+ expected := map[string]int64{
+ "int": 1,
+ "a": 2,
+ "b": 3,
+ }
+
+ assert.EqualValuesf(t, expected, stm.ToMap(s), "value test")
+ assert.EqualValuesf(t, expected, stm.ToMap(&s), "ptr test")
+}
+
+func TestToMap_nestMap(t *testing.T) {
+ s := struct {
+ I int `stm:"int"`
+ M map[string]interface{} `stm:""`
+ }{
+ I: 1,
+ M: map[string]interface{}{
+ "a": 2,
+ "b": 3,
+ "m": map[string]interface{}{
+ "c": 4,
+ },
+ },
+ }
+
+ expected := map[string]int64{
+ "int": 1,
+ "a": 2,
+ "b": 3,
+ "m_c": 4,
+ }
+
+ assert.EqualValuesf(t, expected, stm.ToMap(s), "value test")
+ assert.EqualValuesf(t, expected, stm.ToMap(&s), "ptr test")
+}
+
+func TestToMap_ptr(t *testing.T) {
+ two := 2
+ s := struct {
+ I int `stm:"int"`
+ Ptr *int `stm:"ptr"`
+ Nil *int `stm:"nil"`
+ }{
+ I: 1,
+ Ptr: &two,
+ Nil: nil,
+ }
+
+ expected := map[string]int64{
+ "int": 1,
+ "ptr": 2,
+ }
+
+ assert.EqualValuesf(t, expected, stm.ToMap(s), "value test")
+ assert.EqualValuesf(t, expected, stm.ToMap(&s), "ptr test")
+}
+
+func TestToMap_invalidType(t *testing.T) {
+ s := struct {
+ Str string `stm:"int"`
+ }{
+ Str: "abc",
+ }
+
+ assert.Panics(t, func() {
+ stm.ToMap(s)
+ }, "value test")
+ assert.Panics(t, func() {
+ stm.ToMap(&s)
+ }, "ptr test")
+}
+
+func TestToMap_duplicateKey(t *testing.T) {
+ {
+ s := struct {
+ Key int `stm:"key"`
+ M map[string]int `stm:""`
+ }{
+ Key: 1,
+ M: map[string]int{
+ "key": 2,
+ },
+ }
+
+ assert.Panics(t, func() {
+ stm.ToMap(s)
+ }, "value test")
+ assert.Panics(t, func() {
+ stm.ToMap(&s)
+ }, "ptr test")
+ }
+ {
+ s := struct {
+ Key float64 `stm:"key"`
+ M map[string]float64 `stm:""`
+ }{
+ Key: 1,
+ M: map[string]float64{
+ "key": 2,
+ },
+ }
+
+ assert.Panics(t, func() {
+ stm.ToMap(s)
+ }, "value test")
+ assert.Panics(t, func() {
+ stm.ToMap(&s)
+ }, "ptr test")
+ }
+}
+
+func TestToMap_Variadic(t *testing.T) {
+ s1 := struct {
+ Key1 int `stm:"key1"`
+ }{
+ Key1: 1,
+ }
+ s2 := struct {
+ Key2 int `stm:"key2"`
+ }{
+ Key2: 2,
+ }
+ s3 := struct {
+ Key3 int `stm:"key3"`
+ }{
+ Key3: 3,
+ }
+
+ assert.Equal(
+ t,
+ map[string]int64{
+ "key1": 1,
+ "key2": 2,
+ "key3": 3,
+ },
+ stm.ToMap(s1, s2, s3),
+ )
+}
+
+func TestToMap_badTag(t *testing.T) {
+ assert.Panics(t, func() {
+ s := struct {
+ A int `stm:"a,not_int"`
+ }{1}
+ stm.ToMap(s)
+ })
+ assert.Panics(t, func() {
+ s := struct {
+ A int `stm:"a,1,not_int"`
+ }{1}
+ stm.ToMap(s)
+ })
+ assert.Panics(t, func() {
+ s := struct {
+ A int `stm:"a,not_int,1"`
+ }{1}
+ stm.ToMap(s)
+ })
+ assert.Panics(t, func() {
+ s := struct {
+ A int `stm:"a,1,2,3"`
+ }{1}
+ stm.ToMap(s)
+ })
+}
+
+func TestToMap_nilValue(t *testing.T) {
+ assert.Panics(t, func() {
+ s := struct {
+ a metrics.CounterVec `stm:"a"`
+ }{nil}
+ stm.ToMap(s)
+ })
+}
+func TestToMap_bool(t *testing.T) {
+ s := struct {
+ A bool `stm:"a"`
+ B bool `stm:"b"`
+ }{
+ A: true,
+ B: false,
+ }
+ assert.Equal(
+ t,
+ map[string]int64{
+ "a": 1,
+ "b": 0,
+ },
+ stm.ToMap(s),
+ )
+}
+
+func TestToMap_ArraySlice(t *testing.T) {
+ s := [4]interface{}{
+ map[string]int{
+ "B": 1,
+ "C": 2,
+ },
+ struct {
+ D int `stm:"D"`
+ E int `stm:"E"`
+ }{
+ D: 3,
+ E: 4,
+ },
+ struct {
+ STMKey string
+ F int `stm:"F"`
+ G int `stm:"G"`
+ }{
+ F: 5,
+ G: 6,
+ },
+ struct {
+ STMKey string
+ H int `stm:"H"`
+ I int `stm:"I"`
+ }{
+ STMKey: "KEY",
+ H: 7,
+ I: 8,
+ },
+ }
+
+ assert.Equal(
+ t,
+ map[string]int64{
+ "B": 1,
+ "C": 2,
+ "D": 3,
+ "E": 4,
+ "F": 5,
+ "G": 6,
+ "KEY_H": 7,
+ "KEY_I": 8,
+ },
+ stm.ToMap(s),
+ )
+
+ assert.Equal(
+ t,
+ map[string]int64{
+ "B": 1,
+ "C": 2,
+ "D": 3,
+ "E": 4,
+ "F": 5,
+ "G": 6,
+ "KEY_H": 7,
+ "KEY_I": 8,
+ },
+ stm.ToMap(s[:]),
+ )
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/tlscfg/config.go b/src/go/collectors/go.d.plugin/pkg/tlscfg/config.go
new file mode 100644
index 000000000..60e152e0f
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/tlscfg/config.go
@@ -0,0 +1,77 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package tlscfg
+
+import (
+ "crypto/tls"
+ "crypto/x509"
+ "fmt"
+ "os"
+)
+
+// TLSConfig represents the standard client TLS configuration.
+type TLSConfig struct {
+ // TLSCA specifies the certificate authority to use when verifying server certificates.
+ TLSCA string `yaml:"tls_ca" json:"tls_ca"`
+
+ // TLSCert specifies tls certificate file.
+ TLSCert string `yaml:"tls_cert" json:"tls_cert"`
+
+ // TLSKey specifies tls key file.
+ TLSKey string `yaml:"tls_key" json:"tls_key"`
+
+ // InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name.
+ InsecureSkipVerify bool `yaml:"tls_skip_verify" json:"tls_skip_verify"`
+}
+
+// NewTLSConfig creates a tls.Config, may be nil without an error if TLS is not configured.
+func NewTLSConfig(cfg TLSConfig) (*tls.Config, error) {
+ if cfg.TLSCA == "" && cfg.TLSKey == "" && cfg.TLSCert == "" && !cfg.InsecureSkipVerify {
+ return nil, nil
+ }
+
+ tlsConfig := &tls.Config{
+ InsecureSkipVerify: cfg.InsecureSkipVerify,
+ Renegotiation: tls.RenegotiateNever,
+ }
+
+ if cfg.TLSCA != "" {
+ pool, err := loadCertPool([]string{cfg.TLSCA})
+ if err != nil {
+ return nil, err
+ }
+ tlsConfig.RootCAs = pool
+ }
+
+ if cfg.TLSCert != "" && cfg.TLSKey != "" {
+ cert, err := loadCertificate(cfg.TLSCert, cfg.TLSKey)
+ if err != nil {
+ return nil, err
+ }
+ tlsConfig.Certificates = []tls.Certificate{cert}
+ }
+
+ return tlsConfig, nil
+}
+
+func loadCertPool(certFiles []string) (*x509.CertPool, error) {
+ pool := x509.NewCertPool()
+ for _, certFile := range certFiles {
+ pem, err := os.ReadFile(certFile)
+ if err != nil {
+ return nil, fmt.Errorf("could not read certificate %q: %v", certFile, err)
+ }
+ if !pool.AppendCertsFromPEM(pem) {
+ return nil, fmt.Errorf("could not parse any PEM certificates %q: %v", certFile, err)
+ }
+ }
+ return pool, nil
+}
+
+func loadCertificate(certFile, keyFile string) (tls.Certificate, error) {
+ cert, err := tls.LoadX509KeyPair(certFile, keyFile)
+ if err != nil {
+ return tls.Certificate{}, fmt.Errorf("could not load keypair %s:%s: %v", certFile, keyFile, err)
+ }
+ return cert, nil
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/tlscfg/config_test.go b/src/go/collectors/go.d.plugin/pkg/tlscfg/config_test.go
new file mode 100644
index 000000000..d95fe24bc
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/tlscfg/config_test.go
@@ -0,0 +1,10 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package tlscfg
+
+import "testing"
+
+// TODO:
+func TestNewClientTLSConfig(t *testing.T) {
+
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/web/client.go b/src/go/collectors/go.d.plugin/pkg/web/client.go
new file mode 100644
index 000000000..616d8f8fc
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/web/client.go
@@ -0,0 +1,80 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package web
+
+import (
+ "errors"
+ "fmt"
+ "net"
+ "net/http"
+ "net/url"
+
+ "github.com/netdata/netdata/go/go.d.plugin/pkg/tlscfg"
+)
+
+// ErrRedirectAttempted indicates that a redirect occurred.
+var ErrRedirectAttempted = errors.New("redirect")
+
+// Client is the configuration of the HTTP client.
+// This structure is not intended to be used directly as part of a module's configuration.
+// Supported configuration file formats: YAML.
+type Client struct {
+ // Timeout specifies a time limit for requests made by this Client.
+ // Default (zero value) is no timeout. Must be set before http.Client creation.
+ Timeout Duration `yaml:"timeout" json:"timeout"`
+
+ // NotFollowRedirect specifies the policy for handling redirects.
+ // Default (zero value) is std http package default policy (stop after 10 consecutive requests).
+ NotFollowRedirect bool `yaml:"not_follow_redirects" json:"not_follow_redirects"`
+
+ // ProxyURL specifies the URL of the proxy to use. An empty string means use the environment variables
+ // HTTP_PROXY, HTTPS_PROXY and NO_PROXY (or the lowercase versions thereof) to get the URL.
+ ProxyURL string `yaml:"proxy_url" json:"proxy_url"`
+
+ // TLSConfig specifies the TLS configuration.
+ tlscfg.TLSConfig `yaml:",inline" json:",inline"`
+}
+
+// NewHTTPClient returns a new *http.Client given a Client configuration and an error if any.
+func NewHTTPClient(cfg Client) (*http.Client, error) {
+ tlsConfig, err := tlscfg.NewTLSConfig(cfg.TLSConfig)
+ if err != nil {
+ return nil, fmt.Errorf("error on creating TLS config: %v", err)
+ }
+
+ if cfg.ProxyURL != "" {
+ if _, err := url.Parse(cfg.ProxyURL); err != nil {
+ return nil, fmt.Errorf("error on parsing proxy URL '%s': %v", cfg.ProxyURL, err)
+ }
+ }
+
+ d := &net.Dialer{Timeout: cfg.Timeout.Duration()}
+
+ transport := &http.Transport{
+ Proxy: proxyFunc(cfg.ProxyURL),
+ TLSClientConfig: tlsConfig,
+ DialContext: d.DialContext,
+ TLSHandshakeTimeout: cfg.Timeout.Duration(),
+ }
+
+ return &http.Client{
+ Timeout: cfg.Timeout.Duration(),
+ Transport: transport,
+ CheckRedirect: redirectFunc(cfg.NotFollowRedirect),
+ }, nil
+}
+
+func redirectFunc(notFollowRedirect bool) func(req *http.Request, via []*http.Request) error {
+ if follow := !notFollowRedirect; follow {
+ return nil
+ }
+ return func(_ *http.Request, _ []*http.Request) error { return ErrRedirectAttempted }
+}
+
+func proxyFunc(rawProxyURL string) func(r *http.Request) (*url.URL, error) {
+ if rawProxyURL == "" {
+ return http.ProxyFromEnvironment
+ }
+ proxyURL, _ := url.Parse(rawProxyURL)
+ return http.ProxyURL(proxyURL)
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/web/client_test.go b/src/go/collectors/go.d.plugin/pkg/web/client_test.go
new file mode 100644
index 000000000..ead1486c3
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/web/client_test.go
@@ -0,0 +1,23 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package web
+
+import (
+ "net/http"
+ "testing"
+ "time"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestNewHTTPClient(t *testing.T) {
+ client, _ := NewHTTPClient(Client{
+ Timeout: Duration(time.Second * 5),
+ NotFollowRedirect: true,
+ ProxyURL: "http://127.0.0.1:3128",
+ })
+
+ assert.IsType(t, (*http.Client)(nil), client)
+ assert.Equal(t, time.Second*5, client.Timeout)
+ assert.NotNil(t, client.CheckRedirect)
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/web/doc.go b/src/go/collectors/go.d.plugin/pkg/web/doc.go
new file mode 100644
index 000000000..4c6d31461
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/web/doc.go
@@ -0,0 +1,9 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+/*
+Package web contains HTTP request and client configurations.
+HTTP structure embeds both of them, and it's the only structure that intended to be used as part of a module's configuration.
+Every module that uses HTTP requests to collect metrics should use it.
+It allows to have same set of user configurable options across all modules.
+*/
+package web
diff --git a/src/go/collectors/go.d.plugin/pkg/web/doc_test.go b/src/go/collectors/go.d.plugin/pkg/web/doc_test.go
new file mode 100644
index 000000000..137eed207
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/web/doc_test.go
@@ -0,0 +1,15 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package web
+
+func ExampleHTTP_usage() {
+ // Just embed HTTP into your module structure.
+ // It allows you to have both Request and Client fields in the module configuration file.
+ type myModule struct {
+ HTTP `yaml:",inline"`
+ }
+
+ var m myModule
+ _, _ = NewHTTPRequest(m.Request)
+ _, _ = NewHTTPClient(m.Client)
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/web/duration.go b/src/go/collectors/go.d.plugin/pkg/web/duration.go
new file mode 100644
index 000000000..85d5ef650
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/web/duration.go
@@ -0,0 +1,72 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package web
+
+import (
+ "encoding/json"
+ "fmt"
+ "strconv"
+ "time"
+)
+
// Duration is a time.Duration that (de)serializes flexibly: it accepts Go
// duration strings ("300ms", "1h"), integer seconds and float seconds on
// input, and always encodes as a number of seconds on output.
type Duration time.Duration

// Duration returns the value as a standard library time.Duration.
func (d Duration) Duration() time.Duration {
	return time.Duration(d)
}

// String returns the duration formatted like time.Duration.String (e.g. "1m30s").
func (d Duration) String() string {
	return d.Duration().String()
}

// parseDurationValue converts s into a Duration. It tries, in order: a Go
// duration string ("300ms"), integer seconds ("2"), and float seconds ("1.5").
func parseDurationValue(s string) (Duration, error) {
	if v, err := time.ParseDuration(s); err == nil {
		return Duration(v), nil
	}
	if v, err := strconv.ParseInt(s, 10, 64); err == nil {
		return Duration(time.Duration(v) * time.Second), nil
	}
	if v, err := strconv.ParseFloat(s, 64); err == nil {
		return Duration(v * float64(time.Second)), nil
	}
	return 0, fmt.Errorf("unparsable duration format '%s'", s)
}

// UnmarshalYAML implements yaml.Unmarshaler.
// The YAML scalar is decoded as a string and then parsed with parseDurationValue.
func (d *Duration) UnmarshalYAML(unmarshal func(interface{}) error) error {
	var s string

	if err := unmarshal(&s); err != nil {
		return err
	}

	v, err := parseDurationValue(s)
	if err != nil {
		return err
	}
	*d = v
	return nil
}

// MarshalYAML implements yaml.Marshaler. The duration is encoded as seconds.
func (d Duration) MarshalYAML() (any, error) {
	return float64(d) / float64(time.Second), nil
}

// UnmarshalJSON implements json.Unmarshaler.
// BUG FIX: JSON string values arrive quoted (`"300ms"`); previously the raw
// bytes (including the quotes) were fed to the parsers, so every string-typed
// duration failed to parse. Strip the surrounding quotes first; bare JSON
// numbers (`2`, `1.5`) are passed through unchanged.
func (d *Duration) UnmarshalJSON(b []byte) error {
	s := string(b)
	if len(s) >= 2 && s[0] == '"' && s[len(s)-1] == '"' {
		s = s[1 : len(s)-1]
	}

	v, err := parseDurationValue(s)
	if err != nil {
		return err
	}
	*d = v
	return nil
}

// MarshalJSON implements json.Marshaler. The duration is encoded as seconds.
func (d Duration) MarshalJSON() ([]byte, error) {
	return json.Marshal(float64(d) / float64(time.Second))
}
diff --git a/src/go/collectors/go.d.plugin/pkg/web/duration_test.go b/src/go/collectors/go.d.plugin/pkg/web/duration_test.go
new file mode 100644
index 000000000..b45063f13
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/web/duration_test.go
@@ -0,0 +1,114 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package web
+
+import (
+ "encoding/json"
+ "fmt"
+ "strings"
+ "testing"
+ "time"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+
+ "gopkg.in/yaml.v2"
+)
+
// TestDuration_MarshalYAML checks that Duration encodes to YAML as a plain
// number of seconds (no fraction for whole seconds, fraction kept otherwise).
func TestDuration_MarshalYAML(t *testing.T) {
	tests := map[string]struct {
		d    Duration
		want string
	}{
		"1 second":    {d: Duration(time.Second), want: "1"},
		"1.5 seconds": {d: Duration(time.Second + time.Millisecond*500), want: "1.5"},
	}

	for name, test := range tests {
		t.Run(name, func(t *testing.T) {
			bs, err := yaml.Marshal(&test.d)
			require.NoError(t, err)

			// yaml.Marshal appends a trailing newline; trim before comparing.
			assert.Equal(t, test.want, strings.TrimSpace(string(bs)))
		})
	}
}
+
// TestDuration_MarshalJSON checks that Duration encodes to JSON as a plain
// number of seconds (no fraction for whole seconds, fraction kept otherwise).
func TestDuration_MarshalJSON(t *testing.T) {
	tests := map[string]struct {
		d    Duration
		want string
	}{
		"1 second":    {d: Duration(time.Second), want: "1"},
		"1.5 seconds": {d: Duration(time.Second + time.Millisecond*500), want: "1.5"},
	}

	for name, test := range tests {
		t.Run(name, func(t *testing.T) {
			bs, err := json.Marshal(&test.d)
			require.NoError(t, err)

			assert.Equal(t, test.want, strings.TrimSpace(string(bs)))
		})
	}
}
+
// TestDuration_UnmarshalYAML checks that every supported YAML scalar shape
// (duration string, quoted int/float, bare int/float) decodes into a
// non-zero Duration.
func TestDuration_UnmarshalYAML(t *testing.T) {
	tests := map[string]struct {
		input any
	}{
		"duration":     {input: "300ms"},
		"string int":   {input: "1"},
		"string float": {input: "1.1"},
		"int":          {input: 2},
		"float":        {input: 2.2},
	}

	// zero is the default value; decoding succeeded if the result differs from it.
	var zero Duration

	for name, test := range tests {
		name = fmt.Sprintf("%s (%v)", name, test.input)
		t.Run(name, func(t *testing.T) {
			// Round-trip: encode the raw input to YAML, then decode into Duration.
			data, err := yaml.Marshal(test.input)
			require.NoError(t, err)

			var d Duration
			require.NoError(t, yaml.Unmarshal(data, &d))
			assert.NotEqual(t, zero.String(), d.String())
		})
	}
}
+
+func TestDuration_UnmarshalJSON(t *testing.T) {
+ tests := map[string]struct {
+ input any
+ }{
+ "duration": {input: "300ms"},
+ "string int": {input: "1"},
+ "string float": {input: "1.1"},
+ "int": {input: 2},
+ "float": {input: 2.2},
+ }
+
+ var zero Duration
+
+ type duration struct {
+ D Duration `json:"d"`
+ }
+ type input struct {
+ D any `json:"d"`
+ }
+
+ for name, test := range tests {
+ name = fmt.Sprintf("%s (%v)", name, test.input)
+ t.Run(name, func(t *testing.T) {
+ input := input{D: test.input}
+ data, err := yaml.Marshal(input)
+ require.NoError(t, err)
+
+ var d duration
+ require.NoError(t, yaml.Unmarshal(data, &d))
+ assert.NotEqual(t, zero.String(), d.D.String())
+ })
+ }
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/web/request.go b/src/go/collectors/go.d.plugin/pkg/web/request.go
new file mode 100644
index 000000000..655b3c7d0
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/web/request.go
@@ -0,0 +1,92 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package web
+
+import (
+ "encoding/base64"
+ "fmt"
+ "io"
+ "net/http"
+ "strings"
+
+ "github.com/netdata/netdata/go/go.d.plugin/agent/executable"
+ "github.com/netdata/netdata/go/go.d.plugin/pkg/buildinfo"
+)
+
// Request is the configuration of the HTTP request.
// This structure is not intended to be used directly as part of a module's
// configuration; embed HTTP instead.
// Supported configuration file formats: YAML, JSON (see the struct tags).
type Request struct {
	// URL specifies the URL to access.
	URL string `yaml:"url" json:"url"`

	// Body specifies the HTTP request body to be sent by the client.
	Body string `yaml:"body" json:"body"`

	// Method specifies the HTTP method (GET, POST, PUT, etc.). An empty string means GET.
	Method string `yaml:"method" json:"method"`

	// Headers specifies the HTTP request header fields to be sent by the client.
	Headers map[string]string `yaml:"headers" json:"headers"`

	// Username specifies the username for basic HTTP authentication.
	Username string `yaml:"username" json:"username"`

	// Password specifies the password for basic HTTP authentication.
	Password string `yaml:"password" json:"password"`

	// ProxyUsername specifies the username for basic HTTP authentication.
	// It is used to authenticate a user agent to a proxy server.
	ProxyUsername string `yaml:"proxy_username" json:"proxy_username"`

	// ProxyPassword specifies the password for basic HTTP authentication.
	// It is used to authenticate a user agent to a proxy server.
	ProxyPassword string `yaml:"proxy_password" json:"proxy_password"`
}

// Copy returns a deep copy of the Request: the Headers map is duplicated so
// mutating the copy's headers never affects the original (a nil Headers map
// becomes an empty, non-nil map in the copy).
func (r Request) Copy() Request {
	cp := r
	cp.Headers = make(map[string]string, len(r.Headers))
	for k, v := range r.Headers {
		cp.Headers[k] = v
	}
	return cp
}
+
+var userAgent = fmt.Sprintf("Netdata %s.plugin/%s", executable.Name, buildinfo.Version)
+
+// NewHTTPRequest returns a new *http.Requests given a Request configuration and an error if any.
+func NewHTTPRequest(cfg Request) (*http.Request, error) {
+ var body io.Reader
+ if cfg.Body != "" {
+ body = strings.NewReader(cfg.Body)
+ }
+
+ req, err := http.NewRequest(cfg.Method, cfg.URL, body)
+ if err != nil {
+ return nil, err
+ }
+
+ req.Header.Set("User-Agent", userAgent)
+
+ if cfg.Username != "" || cfg.Password != "" {
+ req.SetBasicAuth(cfg.Username, cfg.Password)
+ }
+
+ if cfg.ProxyUsername != "" && cfg.ProxyPassword != "" {
+ basicAuth := base64.StdEncoding.EncodeToString([]byte(cfg.ProxyUsername + ":" + cfg.ProxyPassword))
+ req.Header.Set("Proxy-Authorization", "Basic "+basicAuth)
+ }
+
+ for k, v := range cfg.Headers {
+ switch k {
+ case "host", "Host":
+ req.Host = v
+ default:
+ req.Header.Set(k, v)
+ }
+ }
+
+ return req, nil
+}
diff --git a/src/go/collectors/go.d.plugin/pkg/web/request_test.go b/src/go/collectors/go.d.plugin/pkg/web/request_test.go
new file mode 100644
index 000000000..284cccb93
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/web/request_test.go
@@ -0,0 +1,180 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package web
+
+import (
+ "encoding/base64"
+ "net/http"
+ "strings"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
// TestRequest_Copy verifies that Copy produces a deep copy: the copy is
// initially equal to the original, and mutating the copy (its Headers map)
// does not leak back into the original.
func TestRequest_Copy(t *testing.T) {
	tests := map[string]struct {
		orig   Request
		change func(req *Request)
	}{
		"change headers": {
			orig: Request{
				URL:    "http://127.0.0.1:19999/api/v1/info",
				Method: "POST",
				Headers: map[string]string{
					"X-Api-Key": "secret",
				},
				Username:      "username",
				Password:      "password",
				ProxyUsername: "proxy_username",
				ProxyPassword: "proxy_password",
			},
			change: func(req *Request) {
				req.Headers["header_key"] = "header_value"
			},
		},
	}

	for name, test := range tests {
		t.Run(name, func(t *testing.T) {
			reqCopy := test.orig.Copy()

			// Equal before mutation, different after — proves the map was duplicated.
			assert.Equal(t, test.orig, reqCopy)
			test.change(&reqCopy)
			assert.NotEqual(t, test.orig, reqCopy)
		})
	}
}
+
// TestNewHTTPRequest builds requests from a variety of Request configurations
// and verifies that every configured option (URL, body, method, headers,
// basic auth, proxy auth) is reflected on the resulting *http.Request.
func TestNewHTTPRequest(t *testing.T) {
	tests := map[string]struct {
		req     Request
		wantErr bool
	}{
		"test url": {
			req: Request{
				URL: "http://127.0.0.1:19999/api/v1/info",
			},
			wantErr: false,
		},
		"test body": {
			req: Request{
				Body: "content",
			},
			wantErr: false,
		},
		"test method": {
			req: Request{
				Method: "POST",
			},
			wantErr: false,
		},
		"test headers": {
			req: Request{
				Headers: map[string]string{
					"X-Api-Key": "secret",
				},
			},
			wantErr: false,
		},
		"test special headers (host)": {
			req: Request{
				Headers: map[string]string{
					"host": "Host",
				},
			},
			wantErr: false,
		},
		"test special headers (Host)": {
			req: Request{
				Headers: map[string]string{
					"Host": "Host",
				},
			},
			wantErr: false,
		},
		"test username and password": {
			req: Request{
				Username: "username",
				Password: "password",
			},
			wantErr: false,
		},
		"test proxy username and proxy password": {
			req: Request{
				ProxyUsername: "proxy_username",
				ProxyPassword: "proxy_password",
			},
			wantErr: false,
		},
	}

	for name, test := range tests {
		t.Run(name, func(t *testing.T) {
			httpReq, err := NewHTTPRequest(test.req)

			if test.wantErr {
				assert.Error(t, err)
				assert.Nil(t, httpReq)
				return
			}

			require.NoError(t, err)
			require.NotNil(t, httpReq)
			require.IsType(t, (*http.Request)(nil), httpReq)

			assert.Equal(t, test.req.URL, httpReq.URL.String())

			if test.req.Body != "" {
				assert.NotNil(t, httpReq.Body)
			}

			if test.req.Username != "" || test.req.Password != "" {
				user, pass, ok := httpReq.BasicAuth()
				assert.True(t, ok)
				assert.Equal(t, test.req.Username, user)
				assert.Equal(t, test.req.Password, pass)
			}

			if test.req.Method != "" {
				assert.Equal(t, test.req.Method, httpReq.Method)
			}

			if test.req.ProxyUsername != "" || test.req.ProxyPassword != "" {
				// Proxy credentials travel in the Proxy-Authorization header;
				// decode them back with the parseBasicAuth helper below.
				user, pass, ok := parseBasicAuth(httpReq.Header.Get("Proxy-Authorization"))
				assert.True(t, ok)
				assert.Equal(t, test.req.ProxyUsername, user)
				assert.Equal(t, test.req.ProxyPassword, pass)
			}

			// "host"/"Host" is special: it must land on req.Host, not in the header map.
			for k, v := range test.req.Headers {
				switch k {
				case "host", "Host":
					assert.Equal(t, httpReq.Host, v)
				default:
					assert.Equal(t, v, httpReq.Header.Get(k))
				}
			}
		})
	}
}
+
// parseBasicAuth decodes a "Basic <base64(user:pass)>" authorization value
// into its username and password parts. The scheme prefix is matched
// case-insensitively; ok is false on a wrong scheme, invalid base64, or a
// payload without a ':' separator.
func parseBasicAuth(auth string) (username, password string, ok bool) {
	const prefix = "Basic "
	if len(auth) < len(prefix) || !strings.EqualFold(auth[:len(prefix)], prefix) {
		return "", "", false
	}

	raw, err := base64.StdEncoding.DecodeString(auth[len(prefix):])
	if err != nil {
		return "", "", false
	}

	user, pass, found := strings.Cut(string(raw), ":")
	if !found {
		return "", "", false
	}

	return user, pass, true
}
diff --git a/src/go/collectors/go.d.plugin/pkg/web/web.go b/src/go/collectors/go.d.plugin/pkg/web/web.go
new file mode 100644
index 000000000..07cef4839
--- /dev/null
+++ b/src/go/collectors/go.d.plugin/pkg/web/web.go
@@ -0,0 +1,11 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package web
+
// HTTP is a struct with embedded Request and Client.
// It is the structure intended to be embedded into a module's configuration,
// exposing both option sets as one flat (inline) mapping.
// Supported configuration file formats: YAML.
type HTTP struct {
	// NOTE(review): encoding/json has no ",inline" option; embedded structs
	// are flattened by default, so the json tag is effectively a no-op here.
	Request `yaml:",inline" json:",inline"`
	Client `yaml:",inline" json:",inline"`
}