Adding upstream version 1.34.4.
Signed-off-by: Daniel Baumann <daniel@debian.org>
Parent: e393c3af3f
Commit: 4978089aab
4963 changed files with 677545 additions and 0 deletions
plugins/parsers/dropwizard/README.md (new file, 177 lines)
@@ -0,0 +1,177 @@
# Dropwizard Parser Plugin

The `dropwizard` data format can parse the [JSON Dropwizard][dropwizard]
representation of a single Dropwizard metric registry. By default, tags are
parsed from metric names as if they were actual InfluxDB line protocol keys
(`measurement<,tag_set>`); this behavior can be overridden by defining a custom
[template pattern][templates]. All field value types are supported: `string`,
`number`, and `boolean`.

[templates]: /docs/TEMPLATE_PATTERN.md
[dropwizard]: http://metrics.dropwizard.io/3.1.0/manual/json/

## Configuration

```toml
[[inputs.file]]
  files = ["example"]

  ## Data format to consume.
  ## Each data format has its own unique set of configuration options, read
  ## more about them here:
  ## https://github.com/influxdata/telegraf/blob/master/docs/DATA_FORMATS_INPUT.md
  data_format = "dropwizard"

  ## Used by the templating engine to join matched values when cardinality is > 1
  separator = "_"

  ## Each template line requires a template pattern. It can have an optional
  ## filter before the template, separated by spaces. It can also have optional extra
  ## tags following the template. Multiple tags should be separated by commas with no spaces,
  ## similar to the line protocol format. There can be only one default template.
  ## Templates support the following formats:
  ## 1. filter + template
  ## 2. filter + template + extra tag(s)
  ## 3. filter + template with field key
  ## 4. default template
  ## By providing an empty template array, templating is disabled and measurements are parsed as InfluxDB line protocol keys (measurement<,tag_set>)
  templates = []

  ## You may use an appropriate [gjson path](https://github.com/tidwall/gjson#path-syntax)
  ## to locate the metric registry within the JSON document
  # dropwizard_metric_registry_path = "metrics"

  ## You may use an appropriate [gjson path](https://github.com/tidwall/gjson#path-syntax)
  ## to locate the default time of the measurements within the JSON document
  # dropwizard_time_path = "time"
  # dropwizard_time_format = "2006-01-02T15:04:05Z07:00"

  ## You may use an appropriate [gjson path](https://github.com/tidwall/gjson#path-syntax)
  ## to locate the tags map within the JSON document
  # dropwizard_tags_path = "tags"

  ## You may even use tag paths per tag
  # [inputs.file.dropwizard_tag_paths]
  #   tag1 = "tags.tag1"
  #   tag2 = "tags.tag2"
```
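
These options map one-to-one onto the exported fields of the Go parser defined in `parser.go` below. The following standalone sketch is illustrative only: the sample registry, template, and `main` package are made up, while the `Parser` fields and the `Init`/`Parse` calls are the ones shown in `parser.go`.

```go
package main

import (
    "fmt"
    "log"

    "github.com/influxdata/telegraf/plugins/parsers/dropwizard"
)

func main() {
    // Split dotted gauge names such as "vm.memory.heap.used" into a
    // measurement, a "pool" tag and a prefixed field, as in parser_test.go.
    parser := &dropwizard.Parser{
        Separator: "_",
        Templates: []string{"vm.* measurement.measurement.pool.field"},
    }
    if err := parser.Init(); err != nil {
        log.Fatal(err)
    }

    registry := []byte(`{"version":"3.0.0","gauges":{"vm.memory.heap.used":{"value":5}}}`)
    metrics, err := parser.Parse(registry)
    if err != nil {
        log.Fatal(err)
    }
    for _, m := range metrics {
        fmt.Println(m.Name(), m.Tags(), m.Fields())
    }
}
```

With that template, a gauge named `vm.memory.heap.used` becomes measurement `vm_memory` with tag `pool=heap` and field `used_value`, mirroring the `committed_value` fields asserted in `parser_test.go`.
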
## Examples

A typical JSON of a dropwizard metric registry:

```json
{
    "version": "3.0.0",
    "counters" : {
        "measurement,tag1=green" : {
            "count" : 1
        }
    },
    "meters" : {
        "measurement" : {
            "count" : 1,
            "m15_rate" : 1.0,
            "m1_rate" : 1.0,
            "m5_rate" : 1.0,
            "mean_rate" : 1.0,
            "units" : "events/second"
        }
    },
    "gauges" : {
        "measurement" : {
            "value" : 1
        }
    },
    "histograms" : {
        "measurement" : {
            "count" : 1,
            "max" : 1.0,
            "mean" : 1.0,
            "min" : 1.0,
            "p50" : 1.0,
            "p75" : 1.0,
            "p95" : 1.0,
            "p98" : 1.0,
            "p99" : 1.0,
            "p999" : 1.0,
            "stddev" : 1.0
        }
    },
    "timers" : {
        "measurement" : {
            "count" : 1,
            "max" : 1.0,
            "mean" : 1.0,
            "min" : 1.0,
            "p50" : 1.0,
            "p75" : 1.0,
            "p95" : 1.0,
            "p98" : 1.0,
            "p99" : 1.0,
            "p999" : 1.0,
            "stddev" : 1.0,
            "m15_rate" : 1.0,
            "m1_rate" : 1.0,
            "m5_rate" : 1.0,
            "mean_rate" : 1.0,
            "duration_units" : "seconds",
            "rate_units" : "calls/second"
        }
    }
}
```

would get translated into five metrics, one per Dropwizard metric type:

```text
measurement,metric_type=counter,tag1=green count=1
measurement,metric_type=meter count=1,m15_rate=1.0,m1_rate=1.0,m5_rate=1.0,mean_rate=1.0
measurement,metric_type=gauge value=1
measurement,metric_type=histogram count=1,max=1.0,mean=1.0,min=1.0,p50=1.0,p75=1.0,p95=1.0,p98=1.0,p99=1.0,p999=1.0,stddev=1.0
measurement,metric_type=timer count=1,max=1.0,mean=1.0,min=1.0,p50=1.0,p75=1.0,p95=1.0,p98=1.0,p99=1.0,p999=1.0,stddev=1.0,m15_rate=1.0,m1_rate=1.0,m5_rate=1.0,mean_rate=1.0
```
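
Non-numeric field values are carried through as well: boolean or string gauge values become boolean or string fields (see the `float64, string, bool` cases in `parser.go`). A test-style sketch of this, with a hypothetical test name and an illustrative registry, reusing the imports and conventions of `parser_test.go`:

```go
func TestStringAndBoolGaugeFields(t *testing.T) {
    parser := &Parser{}
    require.NoError(t, parser.Init())

    registry := []byte(`{"gauges": {"measurement": {"value": true}, "status": {"value": "ok"}}}`)
    metrics, err := parser.Parse(registry)
    require.NoError(t, err)
    require.Len(t, metrics, 2)
    for _, m := range metrics {
        // Boolean and string values survive unchanged as boolean/string fields.
        require.Equal(t, "gauge", m.Tags()["metric_type"])
    }
}
```
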
You may also parse a dropwizard registry from any JSON document which contains a
dropwizard registry in some inner field. For example, to parse the following JSON
document:

```json
{
    "time" : "2017-02-22T14:33:03.662+02:00",
    "tags" : {
        "tag1" : "green",
        "tag2" : "yellow"
    },
    "metrics" : {
        "counters" : {
            "measurement" : {
                "count" : 1
            }
        },
        "meters" : {},
        "gauges" : {},
        "histograms" : {},
        "timers" : {}
    }
}
```

and translate it into:

```text
measurement,metric_type=counter,tag1=green,tag2=yellow count=1 1487766783662000000
```

you simply need to set the following additional configuration properties:

```toml
dropwizard_metric_registry_path = "metrics"
dropwizard_time_path = "time"
dropwizard_time_format = "2006-01-02T15:04:05Z07:00"
dropwizard_tags_path = "tags"
## tag paths per tag are supported too, e.g.
# [inputs.yourinput.dropwizard_tag_paths]
#   tag1 = "tags.tag1"
#   tag2 = "tags.tag2"
```
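
As above, these properties correspond to exported fields of the Go parser. A minimal, illustrative sketch (the `embedded.json` filename and the `main` package are hypothetical; the field names come from `parser.go`):

```go
package main

import (
    "fmt"
    "log"
    "os"

    "github.com/influxdata/telegraf/plugins/parsers/dropwizard"
)

func main() {
    parser := &dropwizard.Parser{
        MetricRegistryPath: "metrics",
        TimePath:           "time",
        TimeFormat:         "2006-01-02T15:04:05Z07:00",
        TagsPath:           "tags",
    }
    if err := parser.Init(); err != nil {
        log.Fatal(err)
    }

    doc, err := os.ReadFile("embedded.json") // the JSON document shown above
    if err != nil {
        log.Fatal(err)
    }
    metrics, err := parser.Parse(doc)
    if err != nil {
        log.Fatal(err)
    }
    for _, m := range metrics {
        // Tags from the "tags" object and the timestamp from "time" are applied here.
        fmt.Println(m.Name(), m.Tags(), m.Time())
    }
}
```
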
plugins/parsers/dropwizard/parser.go (new file, 259 lines)
@@ -0,0 +1,259 @@
package dropwizard

import (
    "encoding/json"
    "errors"
    "fmt"
    "time"

    "github.com/tidwall/gjson"

    "github.com/influxdata/telegraf"
    "github.com/influxdata/telegraf/internal/templating"
    "github.com/influxdata/telegraf/metric"
    "github.com/influxdata/telegraf/plugins/parsers"
    "github.com/influxdata/telegraf/plugins/parsers/influx"
)

// Parser parses json inputs containing dropwizard metrics,
// either top-level or embedded inside a json field.
// This parser is using gjson for retrieving paths within the json file.
type Parser struct {
    MetricRegistryPath string            `toml:"dropwizard_metric_registry_path"`
    TimePath           string            `toml:"dropwizard_time_path"`
    TimeFormat         string            `toml:"dropwizard_time_format"`
    TagsPath           string            `toml:"dropwizard_tags_path"`
    TagPathsMap        map[string]string `toml:"dropwizard_tag_paths_map"`
    Separator          string            `toml:"separator"`
    Templates          []string          `toml:"templates"`
    DefaultTags        map[string]string `toml:"-"`
    Log                telegraf.Logger   `toml:"-"`

    templateEngine *templating.Engine

    // seriesParser parses line protocol measurement + tags
    seriesParser *influx.Parser
}

// Parse parses the input bytes to an array of metrics
func (p *Parser) Parse(buf []byte) ([]telegraf.Metric, error) {
    metrics := make([]telegraf.Metric, 0)

    metricTime, err := p.parseTime(buf)
    if err != nil {
        return nil, err
    }
    dwr, err := p.unmarshalMetrics(buf)
    if err != nil {
        return nil, err
    }

    metrics, err = p.readDWMetrics("counter", dwr["counters"], metrics, metricTime)
    if err != nil {
        return nil, err
    }
    metrics, err = p.readDWMetrics("meter", dwr["meters"], metrics, metricTime)
    if err != nil {
        return nil, err
    }
    metrics, err = p.readDWMetrics("gauge", dwr["gauges"], metrics, metricTime)
    if err != nil {
        return nil, err
    }
    metrics, err = p.readDWMetrics("histogram", dwr["histograms"], metrics, metricTime)
    if err != nil {
        return nil, err
    }
    metrics, err = p.readDWMetrics("timer", dwr["timers"], metrics, metricTime)
    if err != nil {
        return nil, err
    }

    jsonTags := p.readTags(buf)

    // fill json tags first
    if len(jsonTags) > 0 {
        for _, m := range metrics {
            for k, v := range jsonTags {
                // only set the tag if it doesn't already exist:
                if !m.HasTag(k) {
                    m.AddTag(k, v)
                }
            }
        }
    }
    // fill default tags last
    if len(p.DefaultTags) > 0 {
        for _, m := range metrics {
            for k, v := range p.DefaultTags {
                // only set the default tag if it doesn't already exist:
                if !m.HasTag(k) {
                    m.AddTag(k, v)
                }
            }
        }
    }

    return metrics, nil
}

// ParseLine is not supported by the dropwizard format
func (*Parser) ParseLine(string) (telegraf.Metric, error) {
    return nil, errors.New("parsing line is not supported by the dropwizard format")
}

// SetDefaultTags sets the default tags
func (p *Parser) SetDefaultTags(tags map[string]string) {
    p.DefaultTags = tags
}

func (p *Parser) readTags(buf []byte) map[string]string {
    if p.TagsPath != "" {
        var tagsBytes []byte
        tagsResult := gjson.GetBytes(buf, p.TagsPath)
        if tagsResult.Index > 0 {
            tagsBytes = buf[tagsResult.Index : tagsResult.Index+len(tagsResult.Raw)]
        } else {
            tagsBytes = []byte(tagsResult.Raw)
        }
        var tags map[string]string
        err := json.Unmarshal(tagsBytes, &tags)
        if err != nil {
            p.Log.Warnf("Failed to parse tags from JSON path %q: %s", p.TagsPath, err.Error())
        } else if len(tags) > 0 {
            return tags
        }
    }

    tags := make(map[string]string)
    for tagKey, jsonPath := range p.TagPathsMap {
        tags[tagKey] = gjson.GetBytes(buf, jsonPath).String()
    }
    return tags
}

func (p *Parser) parseTime(buf []byte) (time.Time, error) {
    if p.TimePath != "" {
        timeFormat := p.TimeFormat
        if timeFormat == "" {
            timeFormat = time.RFC3339
        }
        timeString := gjson.GetBytes(buf, p.TimePath).String()
        if timeString == "" {
            return time.Time{}, fmt.Errorf("time not found in JSON path %s", p.TimePath)
        }
        t, err := time.Parse(timeFormat, timeString)
        if err != nil {
            return time.Time{}, fmt.Errorf("time %s cannot be parsed with format %s, %w", timeString, timeFormat, err)
        }
        return t.UTC(), nil
    }
    return time.Now(), nil
}

func (p *Parser) unmarshalMetrics(buf []byte) (map[string]interface{}, error) {
    var registryBytes []byte
    if p.MetricRegistryPath != "" {
        regResult := gjson.GetBytes(buf, p.MetricRegistryPath)
        if regResult.Index > 0 {
            registryBytes = buf[regResult.Index : regResult.Index+len(regResult.Raw)]
        } else {
            registryBytes = []byte(regResult.Raw)
        }
        if len(registryBytes) == 0 {
            err := fmt.Errorf("metric registry not found in JSON path %s", p.MetricRegistryPath)
            return nil, err
        }
    } else {
        registryBytes = buf
    }
    var jsonOut map[string]interface{}
    err := json.Unmarshal(registryBytes, &jsonOut)
    if err != nil {
        err = fmt.Errorf("unable to parse dropwizard metric registry from JSON document, %w", err)
        return nil, err
    }
    return jsonOut, nil
}

func (p *Parser) readDWMetrics(metricType string, dwms interface{}, metrics []telegraf.Metric, tm time.Time) ([]telegraf.Metric, error) {
    if dwmsTyped, ok := dwms.(map[string]interface{}); ok {
        for dwmName, dwmFields := range dwmsTyped {
            measurementName := dwmName
            tags := make(map[string]string)
            fieldPrefix := ""
            if p.templateEngine != nil {
                var err error
                measurementName, tags, fieldPrefix, err = p.templateEngine.Apply(dwmName)
                if err != nil {
                    return nil, fmt.Errorf("failed to apply template for type %s: %w", metricType, err)
                }
                if len(fieldPrefix) > 0 {
                    fieldPrefix = fmt.Sprintf("%s%s", fieldPrefix, p.Separator)
                }
            }

            parsed, err := p.seriesParser.Parse([]byte(measurementName))
            var m telegraf.Metric
            if err != nil || len(parsed) != 1 {
                m = metric.New(measurementName, make(map[string]string), make(map[string]interface{}), tm)
            } else {
                m = parsed[0]
                m.SetTime(tm)
            }

            m.AddTag("metric_type", metricType)
            for k, v := range tags {
                m.AddTag(k, v)
            }

            if fields, ok := dwmFields.(map[string]interface{}); ok {
                for k, v := range fields {
                    switch v := v.(type) {
                    case float64, string, bool:
                        m.AddField(fieldPrefix+k, v)
                    default:
                        // ignore
                    }
                }
            }

            metrics = append(metrics, m)
        }
    }

    return metrics, nil
}

func (p *Parser) Init() error {
    parser := &influx.Parser{
        Type: "series",
    }
    err := parser.Init()
    if err != nil {
        return err
    }
    p.seriesParser = parser

    if len(p.Templates) != 0 {
        defaultTemplate, err := templating.NewDefaultTemplateWithPattern("measurement*")
        if err != nil {
            return err
        }

        templateEngine, err := templating.NewEngine(p.Separator, defaultTemplate, p.Templates)
        if err != nil {
            return err
        }
        p.templateEngine = templateEngine
    }

    return nil
}

func init() {
    parsers.Add("dropwizard",
        func(string) telegraf.Parser {
            return &Parser{}
        })
}
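
The `Index`/`Raw` handling above is what lets `dropwizard_metric_registry_path` and `dropwizard_tags_path` slice a sub-document out of the original buffer. A small standalone sketch of that gjson call (the document, path, and `main` package are illustrative; the `GetBytes`/`Index`/`Raw` usage mirrors `unmarshalMetrics` and `readTags`):

```go
package main

import (
    "fmt"

    "github.com/tidwall/gjson"
)

func main() {
    doc := []byte(`{"time": "2017-02-22T14:33:03.662+02:00", "metrics": {"counters": {}}}`)

    // Same call the parser uses: resolve a gjson path against the raw buffer.
    result := gjson.GetBytes(doc, "metrics")

    // When gjson reports where the match starts (Index > 0), the parser
    // re-slices the original buffer instead of copying result.Raw.
    var registry []byte
    if result.Index > 0 {
        registry = doc[result.Index : result.Index+len(result.Raw)]
    } else {
        registry = []byte(result.Raw)
    }
    fmt.Println(string(registry)) // {"counters": {}}
}
```
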
plugins/parsers/dropwizard/parser_test.go (new file, 652 lines)
@@ -0,0 +1,652 @@
package dropwizard

import (
    "testing"
    "time"

    "github.com/stretchr/testify/require"

    "github.com/influxdata/telegraf"
    "github.com/influxdata/telegraf/metric"
    "github.com/influxdata/telegraf/testutil"
)

// validEmptyJSON is a valid dropwizard json document, but without any metrics
const validEmptyJSON = `
{
    "version": "3.0.0",
    "counters" : {},
    "meters" : {},
    "gauges" : {},
    "histograms" : {},
    "timers" : {}
}
`

func TestParseValidEmptyJSON(t *testing.T) {
    parser := &Parser{}
    require.NoError(t, parser.Init())

    // Most basic vanilla test
    metrics, err := parser.Parse([]byte(validEmptyJSON))
    require.NoError(t, err)
    require.Empty(t, metrics)
}

// validCounterJSON is a valid dropwizard json document containing one counter
const validCounterJSON = `
{
    "version": "3.0.0",
    "counters" : {
        "measurement" : {
            "count" : 1
        }
    },
    "meters" : {},
    "gauges" : {},
    "histograms" : {},
    "timers" : {}
}
`

func TestParseValidCounterJSON(t *testing.T) {
    parser := &Parser{}
    require.NoError(t, parser.Init())

    metrics, err := parser.Parse([]byte(validCounterJSON))
    require.NoError(t, err)
    require.Len(t, metrics, 1)
    require.Equal(t, "measurement", metrics[0].Name())
    require.Equal(t, map[string]interface{}{
        "count": float64(1),
    }, metrics[0].Fields())
    require.Equal(t, map[string]string{"metric_type": "counter"}, metrics[0].Tags())
}

// validEmbeddedCounterJSON is a valid json document containing separate fields for dropwizard metrics, tags and time override.
const validEmbeddedCounterJSON = `
{
    "time" : "2017-02-22T14:33:03.662+02:00",
    "tags" : {
        "tag1" : "green",
        "tag2" : "yellow",
        "tag3 space,comma=equals" : "red ,="
    },
    "metrics" : {
        "counters" : {
            "measurement" : {
                "count" : 1
            }
        },
        "meters" : {},
        "gauges" : {},
        "histograms" : {},
        "timers" : {}
    }
}
`

func TestParseValidEmbeddedCounterJSON(t *testing.T) {
    timeFormat := "2006-01-02T15:04:05Z07:00"
    metricTime, err := time.Parse(timeFormat, "2017-02-22T15:33:03.662+03:00")
    require.NoError(t, err)
    parser := &Parser{
        MetricRegistryPath: "metrics",
        TagsPath:           "tags",
        TimePath:           "time",
    }
    require.NoError(t, parser.Init())

    metrics, err := parser.Parse([]byte(validEmbeddedCounterJSON))
    require.NoError(t, err)
    require.Len(t, metrics, 1)
    require.Equal(t, "measurement", metrics[0].Name())
    require.Equal(t, map[string]interface{}{
        "count": float64(1),
    }, metrics[0].Fields())
    require.Equal(t, map[string]string{
        "metric_type": "counter",
        "tag1": "green",
        "tag2": "yellow",
        "tag3 space,comma=equals": "red ,=",
    }, metrics[0].Tags())
    require.Truef(t, metricTime.Equal(metrics[0].Time()), "%s should be equal to %s", metrics[0].Time(), metricTime)

    // now test json tags through TagPathsMap
    parser2 := &Parser{
        MetricRegistryPath: "metrics",
        TagPathsMap:        map[string]string{"tag1": "tags.tag1"},
        TimePath:           "time",
    }
    require.NoError(t, parser2.Init())
    metrics2, err2 := parser2.Parse([]byte(validEmbeddedCounterJSON))
    require.NoError(t, err2)
    require.Equal(t, map[string]string{"metric_type": "counter", "tag1": "green"}, metrics2[0].Tags())
}

// validMeterJSON1 is a valid dropwizard json document containing one meter
const validMeterJSON1 = `
{
    "version": "3.0.0",
    "counters" : {},
    "meters" : {
        "measurement1" : {
            "count" : 1,
            "m15_rate" : 1.0,
            "m1_rate" : 1.0,
            "m5_rate" : 1.0,
            "mean_rate" : 1.0,
            "units" : "events/second"
        }
    },
    "gauges" : {},
    "histograms" : {},
    "timers" : {}
}
`

func TestParseValidMeterJSON1(t *testing.T) {
    parser := &Parser{}
    require.NoError(t, parser.Init())

    metrics, err := parser.Parse([]byte(validMeterJSON1))
    require.NoError(t, err)
    require.Len(t, metrics, 1)
    require.Equal(t, "measurement1", metrics[0].Name())
    require.Equal(t, map[string]interface{}{
        "count": float64(1),
        "m15_rate": float64(1),
        "m1_rate": float64(1),
        "m5_rate": float64(1),
        "mean_rate": float64(1),
        "units": "events/second",
    }, metrics[0].Fields())

    require.Equal(t, map[string]string{"metric_type": "meter"}, metrics[0].Tags())
}

// validMeterJSON2 is a valid dropwizard json document containing one meter with one tag
const validMeterJSON2 = `
{
    "version": "3.0.0",
    "counters" : {},
    "meters" : {
        "measurement2,key=value" : {
            "count" : 2,
            "m15_rate" : 2.0,
            "m1_rate" : 2.0,
            "m5_rate" : 2.0,
            "mean_rate" : 2.0,
            "units" : "events/second"
        }
    },
    "gauges" : {},
    "histograms" : {},
    "timers" : {}
}
`

func TestParseValidMeterJSON2(t *testing.T) {
    parser := &Parser{}
    require.NoError(t, parser.Init())

    metrics, err := parser.Parse([]byte(validMeterJSON2))
    require.NoError(t, err)
    require.Len(t, metrics, 1)
    require.Equal(t, "measurement2", metrics[0].Name())
    require.Equal(t, map[string]interface{}{
        "count": float64(2),
        "m15_rate": float64(2),
        "m1_rate": float64(2),
        "m5_rate": float64(2),
        "mean_rate": float64(2),
        "units": "events/second",
    }, metrics[0].Fields())
    require.Equal(t, map[string]string{"metric_type": "meter", "key": "value"}, metrics[0].Tags())
}

// validGaugeJSON is a valid dropwizard json document containing one gauge
const validGaugeJSON = `
{
    "version": "3.0.0",
    "counters" : {},
    "meters" : {},
    "gauges" : {
        "measurement" : {
            "value" : true
        }
    },
    "histograms" : {},
    "timers" : {}
}
`

func TestParseValidGaugeJSON(t *testing.T) {
    parser := &Parser{}
    require.NoError(t, parser.Init())

    metrics, err := parser.Parse([]byte(validGaugeJSON))
    require.NoError(t, err)
    require.Len(t, metrics, 1)
    require.Equal(t, "measurement", metrics[0].Name())
    require.Equal(t, map[string]interface{}{
        "value": true,
    }, metrics[0].Fields())
    require.Equal(t, map[string]string{"metric_type": "gauge"}, metrics[0].Tags())
}

// validHistogramJSON is a valid dropwizard json document containing one histogram
const validHistogramJSON = `
{
    "version": "3.0.0",
    "counters" : {},
    "meters" : {},
    "gauges" : {},
    "histograms" : {
        "measurement" : {
            "count" : 1,
            "max" : 2,
            "mean" : 3,
            "min" : 4,
            "p50" : 5,
            "p75" : 6,
            "p95" : 7,
            "p98" : 8,
            "p99" : 9,
            "p999" : 10,
            "stddev" : 11
        }
    },
    "timers" : {}
}
`

func TestParseValidHistogramJSON(t *testing.T) {
    parser := &Parser{}
    require.NoError(t, parser.Init())

    metrics, err := parser.Parse([]byte(validHistogramJSON))
    require.NoError(t, err)
    require.Len(t, metrics, 1)
    require.Equal(t, "measurement", metrics[0].Name())
    require.Equal(t, map[string]interface{}{
        "count": float64(1),
        "max": float64(2),
        "mean": float64(3),
        "min": float64(4),
        "p50": float64(5),
        "p75": float64(6),
        "p95": float64(7),
        "p98": float64(8),
        "p99": float64(9),
        "p999": float64(10),
        "stddev": float64(11),
    }, metrics[0].Fields())
    require.Equal(t, map[string]string{"metric_type": "histogram"}, metrics[0].Tags())
}

// validTimerJSON is a valid dropwizard json document containing one timer
const validTimerJSON = `
{
    "version": "3.0.0",
    "counters" : {},
    "meters" : {},
    "gauges" : {},
    "histograms" : {},
    "timers" : {
        "measurement" : {
            "count" : 1,
            "max" : 2,
            "mean" : 3,
            "min" : 4,
            "p50" : 5,
            "p75" : 6,
            "p95" : 7,
            "p98" : 8,
            "p99" : 9,
            "p999" : 10,
            "stddev" : 11,
            "m15_rate" : 12,
            "m1_rate" : 13,
            "m5_rate" : 14,
            "mean_rate" : 15,
            "duration_units" : "seconds",
            "rate_units" : "calls/second"
        }
    }
}
`

func TestParseValidTimerJSON(t *testing.T) {
    parser := &Parser{}
    require.NoError(t, parser.Init())

    metrics, err := parser.Parse([]byte(validTimerJSON))
    require.NoError(t, err)
    require.Len(t, metrics, 1)
    require.Equal(t, "measurement", metrics[0].Name())
    require.Equal(t, map[string]interface{}{
        "count": float64(1),
        "max": float64(2),
        "mean": float64(3),
        "min": float64(4),
        "p50": float64(5),
        "p75": float64(6),
        "p95": float64(7),
        "p98": float64(8),
        "p99": float64(9),
        "p999": float64(10),
        "stddev": float64(11),
        "m15_rate": float64(12),
        "m1_rate": float64(13),
        "m5_rate": float64(14),
        "mean_rate": float64(15),
        "duration_units": "seconds",
        "rate_units": "calls/second",
    }, metrics[0].Fields())
    require.Equal(t, map[string]string{"metric_type": "timer"}, metrics[0].Tags())
}

// validAllJSON is a valid dropwizard json document containing one metric of each type
const validAllJSON = `
{
    "version": "3.0.0",
    "counters" : {
        "measurement" : {"count" : 1}
    },
    "meters" : {
        "measurement" : {"count" : 1}
    },
    "gauges" : {
        "measurement" : {"value" : 1}
    },
    "histograms" : {
        "measurement" : {"count" : 1}
    },
    "timers" : {
        "measurement" : {"count" : 1}
    }
}
`

func TestParseValidAllJSON(t *testing.T) {
    parser := &Parser{}
    require.NoError(t, parser.Init())

    metrics, err := parser.Parse([]byte(validAllJSON))
    require.NoError(t, err)
    require.Len(t, metrics, 5)
}

func TestTagParsingProblems(t *testing.T) {
    // giving a wrong path results in empty tags
    parser1 := &Parser{
        MetricRegistryPath: "metrics",
        TagsPath:           "tags1",
        Log:                testutil.Logger{},
    }
    require.NoError(t, parser1.Init())

    metrics1, err1 := parser1.Parse([]byte(validEmbeddedCounterJSON))
    require.NoError(t, err1)
    require.Len(t, metrics1, 1)
    require.Equal(t, map[string]string{"metric_type": "counter"}, metrics1[0].Tags())

    // giving a wrong TagsPath falls back to TagPathsMap
    parser2 := &Parser{
        MetricRegistryPath: "metrics",
        TagsPath:           "tags1",
        TagPathsMap:        map[string]string{"tag1": "tags.tag1"},
        Log:                testutil.Logger{},
    }
    require.NoError(t, parser2.Init())
    metrics2, err2 := parser2.Parse([]byte(validEmbeddedCounterJSON))
    require.NoError(t, err2)
    require.Len(t, metrics2, 1)
    require.Equal(t, map[string]string{"metric_type": "counter", "tag1": "green"}, metrics2[0].Tags())
}

// sampleTemplateJSON is a sample json document containing metrics to be tested against the templating engine.
const sampleTemplateJSON = `
{
    "version": "3.0.0",
    "counters" : {},
    "meters" : {},
    "gauges" : {
        "vm.memory.heap.committed" : { "value" : 1 },
        "vm.memory.heap.init" : { "value" : 2 },
        "vm.memory.heap.max" : { "value" : 3 },
        "vm.memory.heap.usage" : { "value" : 4 },
        "vm.memory.heap.used" : { "value" : 5 },
        "vm.memory.non-heap.committed" : { "value" : 6 },
        "vm.memory.non-heap.init" : { "value" : 7 },
        "vm.memory.non-heap.max" : { "value" : 8 },
        "vm.memory.non-heap.usage" : { "value" : 9 },
        "vm.memory.non-heap.used" : { "value" : 10 }
    },
    "histograms" : {
        "jenkins.job.building.duration" : {
            "count" : 1,
            "max" : 2,
            "mean" : 3,
            "min" : 4,
            "p50" : 5,
            "p75" : 6,
            "p95" : 7,
            "p98" : 8,
            "p99" : 9,
            "p999" : 10,
            "stddev" : 11
        }
    },
    "timers" : {}
}
`

func TestParseSampleTemplateJSON(t *testing.T) {
    parser := &Parser{
        Separator: "_",
        Templates: []string{
            "jenkins.* measurement.metric.metric.field",
            "vm.* measurement.measurement.pool.field",
        },
    }
    require.NoError(t, parser.Init())

    metrics, err := parser.Parse([]byte(sampleTemplateJSON))
    require.NoError(t, err)

    require.Len(t, metrics, 11)

    jenkinsMetric := search(metrics, "jenkins", nil, "")
    require.NotNil(t, jenkinsMetric, "the metrics should contain a jenkins measurement")
    require.Equal(t, map[string]interface{}{
        "duration_count": float64(1),
        "duration_max": float64(2),
        "duration_mean": float64(3),
        "duration_min": float64(4),
        "duration_p50": float64(5),
        "duration_p75": float64(6),
        "duration_p95": float64(7),
        "duration_p98": float64(8),
        "duration_p99": float64(9),
        "duration_p999": float64(10),
        "duration_stddev": float64(11),
    }, jenkinsMetric.Fields())
    require.Equal(t, map[string]string{"metric_type": "histogram", "metric": "job_building"}, jenkinsMetric.Tags())

    vmMemoryHeapCommitted := search(metrics, "vm_memory", map[string]string{"pool": "heap"}, "committed_value")
    require.NotNil(t, vmMemoryHeapCommitted)
    require.Equal(t, map[string]interface{}{
        "committed_value": float64(1),
    }, vmMemoryHeapCommitted.Fields())
    require.Equal(t, map[string]string{"metric_type": "gauge", "pool": "heap"}, vmMemoryHeapCommitted.Tags())

    vmMemoryNonHeapCommitted := search(metrics, "vm_memory", map[string]string{"pool": "non-heap"}, "committed_value")
    require.NotNil(t, vmMemoryNonHeapCommitted)
    require.Equal(t, map[string]interface{}{
        "committed_value": float64(6),
    }, vmMemoryNonHeapCommitted.Fields())
    require.Equal(t, map[string]string{"metric_type": "gauge", "pool": "non-heap"}, vmMemoryNonHeapCommitted.Tags())
}

func search(metrics []telegraf.Metric, name string, tags map[string]string, fieldName string) telegraf.Metric {
    for _, v := range metrics {
        if v.Name() == name && containsAll(v.Tags(), tags) {
            if len(fieldName) == 0 {
                return v
            }
            if _, ok := v.Fields()[fieldName]; ok {
                return v
            }
        }
    }
    return nil
}

func containsAll(t1, t2 map[string]string) bool {
    for k, v := range t2 {
        if foundValue, ok := t1[k]; !ok || v != foundValue {
            return false
        }
    }
    return true
}

func TestDropWizard(t *testing.T) {
    tests := []struct {
        name        string
        input       []byte
        metrics     []telegraf.Metric
        expectError bool
    }{
        {
            name:  "minimal",
            input: []byte(`{"version": "3.0.0", "counters": {"cpu": {"value": 42}}}`),
            metrics: []telegraf.Metric{
                metric.New(
                    "cpu",
                    map[string]string{
                        "metric_type": "counter",
                    },
                    map[string]interface{}{
                        "value": 42.0,
                    },
                    time.Unix(0, 0),
                ),
            },
        },
        {
            name:  "name with space unescaped",
            input: []byte(`{"version": "3.0.0", "counters": {"hello world": {"value": 42}}}`),
            metrics: []telegraf.Metric{
                metric.New(
                    "hello world",
                    map[string]string{
                        "metric_type": "counter",
                    },
                    map[string]interface{}{
                        "value": 42.0,
                    },
                    time.Unix(0, 0),
                ),
            },
        },
        {
            name:        "name with space single slash escaped is not valid JSON",
            input:       []byte(`{"version": "3.0.0", "counters": {"hello\ world": {"value": 42}}}`),
            expectError: true,
        },
        {
            name:  "name with space double slash escape",
            input: []byte(`{"version": "3.0.0", "counters": {"hello\\ world": {"value": 42}}}`),
            metrics: []telegraf.Metric{
                metric.New(
                    "hello world",
                    map[string]string{
                        "metric_type": "counter",
                    },
                    map[string]interface{}{
                        "value": 42.0,
                    },
                    time.Unix(0, 0),
                ),
            },
        },
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            parser := &Parser{}
            require.NoError(t, parser.Init())
            metrics, err := parser.Parse(tt.input)
            if tt.expectError {
                require.Error(t, err)
            } else {
                require.NoError(t, err)
            }
            testutil.RequireMetricsEqual(t, tt.metrics, metrics, testutil.IgnoreTime())
        })
    }
}

const benchmarkData = `{
    "version": "3.0.0",
    "gauges" : {
        "benchmark,tags_host=myhost,tags_sdkver=3.11.5,tags_platform=python": {
            "value": 5.0
        },
        "benchmark,tags_host=myhost,tags_sdkver=3.11.4,tags_platform=python": {
            "value": 4.0
        }
    }
}
`

func TestBenchmarkData(t *testing.T) {
    plugin := &Parser{}
    require.NoError(t, plugin.Init())

    expected := []telegraf.Metric{
        metric.New(
            "benchmark",
            map[string]string{
                "metric_type": "gauge",
                "tags_host": "myhost",
                "tags_platform": "python",
                "tags_sdkver": "3.11.5",
            },
            map[string]interface{}{
                "value": 5.0,
            },
            time.Unix(0, 0),
        ),
        metric.New(
            "benchmark",
            map[string]string{
                "metric_type": "gauge",
                "tags_host": "myhost",
                "tags_platform": "python",
                "tags_sdkver": "3.11.4",
            },
            map[string]interface{}{
                "value": 4.0,
            },
            time.Unix(0, 0),
        ),
    }

    actual, err := plugin.Parse([]byte(benchmarkData))
    require.NoError(t, err)
    testutil.RequireMetricsEqual(t, expected, actual, testutil.IgnoreTime(), testutil.SortMetrics())
}

func BenchmarkParsing(b *testing.B) {
    plugin := &Parser{}
    require.NoError(b, plugin.Init())

    for n := 0; n < b.N; n++ {
        //nolint:errcheck // Benchmarking so skip the error check to avoid the unnecessary operations
        plugin.Parse([]byte(benchmarkData))
    }
}