commit 4978089aab (parent e393c3af3f)

Adding upstream version 1.34.4.

Signed-off-by: Daniel Baumann <daniel@debian.org>

4963 changed files with 677545 additions and 0 deletions
plugins/parsers/json/README.md (new file, 262 lines)

@@ -0,0 +1,262 @@
# JSON Parser Plugin

The JSON data format parses a [JSON][json] object or an array of objects into
metric fields.

**NOTE:** All JSON numbers are converted to float fields. JSON strings and
booleans are ignored unless specified in the `tag_keys` or `json_string_fields`
options.
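Nested objects are flattened into fields whose names join the path with
underscores (the `b_c` fields in the examples below). As a minimal sketch with
illustrative keys, the following document yields only the field
`server_load=0.9`; the `status` string is dropped unless `server_status`
matches a `json_string_fields` pattern or a `tag_keys` entry:

```json
{"server": {"load": 0.9, "status": "ok"}}
```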
## Configuration

```toml
[[inputs.file]]
  files = ["example"]

  ## Data format to consume.
  ## Each data format has its own unique set of configuration options, read
  ## more about them here:
  ## https://github.com/influxdata/telegraf/blob/master/docs/DATA_FORMATS_INPUT.md
  data_format = "json"

  ## When strict is true and a JSON array is being parsed, all objects within
  ## the array must be valid.
  json_strict = true

  ## Query is a GJSON path that specifies a specific chunk of JSON to be
  ## parsed; if not specified, the whole document will be parsed.
  ##
  ## GJSON query paths are described here:
  ## https://github.com/tidwall/gjson/tree/v1.3.0#path-syntax
  json_query = ""

  ## Tag keys is an array of keys that should be added as tags. Matching keys
  ## are no longer saved as fields. Supports wildcard glob matching.
  tag_keys = [
    "my_tag_1",
    "my_tag_2",
    "tags_*",
    "tag*"
  ]

  ## Array of glob patterns for string or boolean keys that should be added as
  ## string fields.
  json_string_fields = []

  ## Name key is the key to use as the measurement name.
  json_name_key = ""

  ## Time key is the key containing the time that should be used to create the
  ## metric.
  json_time_key = ""

  ## Time format is the time layout that should be used to interpret the json_time_key.
  ## The time must be `unix`, `unix_ms`, `unix_us`, `unix_ns`, or a time in the
  ## "reference time". To define a different format, arrange the values from
  ## the "reference time" in the example to match the format you will be
  ## using. For more information on the "reference time", visit
  ## https://golang.org/pkg/time/#Time.Format
  ## ex: json_time_format = "Mon Jan 2 15:04:05 -0700 MST 2006"
  ##     json_time_format = "2006-01-02T15:04:05Z07:00"
  ##     json_time_format = "01/02/2006 15:04:05"
  ##     json_time_format = "unix"
  ##     json_time_format = "unix_ms"
  json_time_format = ""

  ## Timezone allows you to provide an override for timestamps that
  ## don't already include an offset, e.g. 04/06/2016 12:41:45
  ##
  ## Default: "" which renders UTC
  ## Options are as follows:
  ##   1. Local             -- interpret based on machine localtime
  ##   2. "America/New_York" -- Unix TZ values like those found in https://en.wikipedia.org/wiki/List_of_tz_database_time_zones
  ##   3. UTC               -- or blank/unspecified, will return timestamp in UTC
  json_timezone = ""
```

### json_query

The `json_query` is a [GJSON][gjson] path that can be used to transform the
JSON document before it is parsed. The query is performed before any other
options are applied, and the new document it produces is parsed instead of the
original document; as such, the result of the query should be a JSON object or
an array of objects.

Consult the GJSON [path syntax][gjson syntax] for details and examples, and
consider using the [GJSON playground][gjson playground] for developing and
debugging your query.
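As a quick sketch (the key names here are hypothetical), a dot-delimited GJSON
path selects a nested element, and only that sub-document reaches the parser:

```toml
[[inputs.file]]
  files = ["example"]
  data_format = "json"

  ## Hypothetical keys: parse only the nested "payload.metrics" object instead
  ## of the whole document. See the Query example below for a full walk-through.
  json_query = "payload.metrics"
```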
### json_time_key, json_time_format, json_timezone

By default the current time is used for all created metrics; to take the time
from the JSON document instead, use the `json_time_key` and `json_time_format`
options together to set the time to a value in the parsed document.

The `json_time_key` option specifies the key containing the time value, and
`json_time_format` must be set to `unix`, `unix_ms`, `unix_us`, `unix_ns`, or
the Go "reference time", which is defined to be the specific time:
`Mon Jan 2 15:04:05 MST 2006`.

Consult the Go [time][time parse] package for details and additional examples
on how to set the time format.

When parsing times that don't include a timezone specifier, times are assumed
to be UTC. To default to another timezone, or to local time, specify the
`json_timezone` option. This option should be set to a [Unix TZ
value](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones), such as
`America/New_York`, to `Local` to utilize the system timezone, or to `UTC`.
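Putting these options together, a minimal configuration that reads the
timestamp from a hypothetical `created_at` key in a custom layout and
interprets offset-less times as US Eastern could look like this:

```toml
[[inputs.file]]
  files = ["example"]
  data_format = "json"

  ## "created_at" and the layout below are illustrative; match them to your documents.
  json_time_key = "created_at"
  json_time_format = "2006-01-02 15:04:05"
  json_timezone = "America/New_York"
```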
## Examples

### Basic Parsing

Config:

```toml
[[inputs.file]]
  files = ["example"]
  name_override = "myjsonmetric"
  data_format = "json"
```

Input:

```json
{
    "a": 5,
    "b": {
        "c": 6
    },
    "ignored": "I'm a string"
}
```

Output:

```text
myjsonmetric a=5,b_c=6
```

### Name, Tags, and String Fields

Config:

```toml
[[inputs.file]]
  files = ["example"]
  json_name_key = "name"
  tag_keys = ["my_tag_1"]
  json_string_fields = ["b_my_field"]
  data_format = "json"
```

Input:

```json
{
    "a": 5,
    "b": {
        "c": 6,
        "my_field": "description"
    },
    "my_tag_1": "foo",
    "name": "my_json"
}
```

Output:

```text
my_json,my_tag_1=foo a=5,b_c=6,b_my_field="description"
```

### Arrays

If the JSON data is an array, then each object within the array is parsed with
the configured settings.

Config:

```toml
[[inputs.file]]
  files = ["example"]
  data_format = "json"
  json_time_key = "b_time"
  json_time_format = "02 Jan 06 15:04 MST"
```

Input:

```json
[
    {
        "a": 5,
        "b": {
            "c": 6,
            "time": "04 Jan 06 15:04 MST"
        }
    },
    {
        "a": 7,
        "b": {
            "c": 8,
            "time": "11 Jan 07 15:04 MST"
        }
    }
]
```

Output:

```text
file a=5,b_c=6 1136387040000000000
file a=7,b_c=8 1168527840000000000
```

### Query

The `json_query` option can be used to parse a subset of the document.

Config:

```toml
[[inputs.file]]
  files = ["example"]
  data_format = "json"
  tag_keys = ["first"]
  json_string_fields = ["last"]
  json_query = "obj.friends"
```

Input:

```json
{
    "obj": {
        "name": {"first": "Tom", "last": "Anderson"},
        "age": 37,
        "children": ["Sara", "Alex", "Jack"],
        "fav.movie": "Deer Hunter",
        "friends": [
            {"first": "Dale", "last": "Murphy", "age": 44},
            {"first": "Roger", "last": "Craig", "age": 68},
            {"first": "Jane", "last": "Murphy", "age": 47}
        ]
    }
}
```

Output:

```text
file,first=Dale last="Murphy",age=44
file,first=Roger last="Craig",age=68
file,first=Jane last="Murphy",age=47
```

[gjson]: https://github.com/tidwall/gjson
[gjson syntax]: https://github.com/tidwall/gjson#path-syntax
[gjson playground]: https://gjson.dev/
[json]: https://www.json.org/
[time parse]: https://golang.org/pkg/time/#Parse
plugins/parsers/json/json_flattener.go (new file, 70 lines)

@@ -0,0 +1,70 @@
package json

import (
    "fmt"
    "strconv"
)

type JSONFlattener struct {
    Fields map[string]interface{}
}

// FlattenJSON flattens nested maps/interfaces into a fields map (ignoring bools and strings)
func (f *JSONFlattener) FlattenJSON(
    fieldname string,
    v interface{}) error {
    if f.Fields == nil {
        f.Fields = make(map[string]interface{})
    }

    return f.FullFlattenJSON(fieldname, v, false, false)
}

// FullFlattenJSON flattens nested maps/interfaces into a fields map (including bools and strings)
func (f *JSONFlattener) FullFlattenJSON(fieldName string, v interface{}, convertString, convertBool bool) error {
    if f.Fields == nil {
        f.Fields = make(map[string]interface{})
    }

    switch t := v.(type) {
    case map[string]interface{}:
        for fieldKey, fieldVal := range t {
            if fieldName != "" {
                fieldKey = fieldName + "_" + fieldKey
            }

            err := f.FullFlattenJSON(fieldKey, fieldVal, convertString, convertBool)
            if err != nil {
                return err
            }
        }
    case []interface{}:
        for i, fieldVal := range t {
            fieldKey := strconv.Itoa(i)
            if fieldName != "" {
                fieldKey = fieldName + "_" + fieldKey
            }
            err := f.FullFlattenJSON(fieldKey, fieldVal, convertString, convertBool)
            if err != nil {
                return err
            }
        }
    case float64:
        f.Fields[fieldName] = t
    case string:
        if !convertString {
            return nil
        }
        f.Fields[fieldName] = v.(string)
    case bool:
        if !convertBool {
            return nil
        }
        f.Fields[fieldName] = v.(bool)
    case nil:
        return nil
    default:
        return fmt.Errorf("json flattener: got unexpected type %T with value %v (%s)", t, t, fieldName)
    }
    return nil
}
plugins/parsers/json/parser.go (new file, 248 lines)

@@ -0,0 +1,248 @@
package json

import (
    "bytes"
    "encoding/json"
    "errors"
    "fmt"
    "strconv"
    "time"

    "github.com/tidwall/gjson"

    "github.com/influxdata/telegraf"
    "github.com/influxdata/telegraf/filter"
    "github.com/influxdata/telegraf/internal"
    "github.com/influxdata/telegraf/metric"
    "github.com/influxdata/telegraf/plugins/parsers"
)

var (
    utf8BOM      = []byte("\xef\xbb\xbf")
    ErrWrongType = errors.New("must be an object or an array of objects")
)

type Parser struct {
    MetricName   string   `toml:"metric_name"`
    TagKeys      []string `toml:"tag_keys"`
    NameKey      string   `toml:"json_name_key"`
    StringFields []string `toml:"json_string_fields"`
    Query        string   `toml:"json_query"`
    TimeKey      string   `toml:"json_time_key"`
    TimeFormat   string   `toml:"json_time_format"`
    Timezone     string   `toml:"json_timezone"`
    Strict       bool     `toml:"json_strict"`

    DefaultTags map[string]string `toml:"-"`
    Log         telegraf.Logger   `toml:"-"`

    location     *time.Location
    tagFilter    filter.Filter
    stringFilter filter.Filter
}

func (p *Parser) parseArray(data []interface{}, timestamp time.Time) ([]telegraf.Metric, error) {
    results := make([]telegraf.Metric, 0)

    for _, item := range data {
        switch v := item.(type) {
        case map[string]interface{}:
            metrics, err := p.parseObject(v, timestamp)
            if err != nil {
                if p.Strict {
                    return nil, err
                }
                continue
            }
            results = append(results, metrics...)
        default:
            return nil, ErrWrongType
        }
    }

    return results, nil
}

func (p *Parser) parseObject(data map[string]interface{}, timestamp time.Time) ([]telegraf.Metric, error) {
    tags := make(map[string]string)
    for k, v := range p.DefaultTags {
        tags[k] = v
    }

    f := JSONFlattener{}
    err := f.FullFlattenJSON("", data, true, true)
    if err != nil {
        return nil, err
    }

    name := p.MetricName

    // checks if json_name_key is set
    if p.NameKey != "" {
        if field, ok := f.Fields[p.NameKey].(string); ok {
            name = field
        }
    }

    // if time key is specified, set timestamp to it
    if p.TimeKey != "" {
        if p.TimeFormat == "" {
            err := errors.New("use of 'json_time_key' requires 'json_time_format'")
            return nil, err
        }

        if f.Fields[p.TimeKey] == nil {
            err := errors.New("'json_time_key' could not be found")
            return nil, err
        }

        timestamp, err = internal.ParseTimestamp(p.TimeFormat, f.Fields[p.TimeKey], p.location)
        if err != nil {
            return nil, err
        }

        delete(f.Fields, p.TimeKey)

        // if the year is 0, set to current year
        if timestamp.Year() == 0 {
            timestamp = timestamp.AddDate(time.Now().Year(), 0, 0)
        }
    }

    tags, nFields := p.switchFieldToTag(tags, f.Fields)
    m := metric.New(name, tags, nFields, timestamp)

    return []telegraf.Metric{m}, nil
}

// will take in field map with strings and bools,
// search for tag-keys that match fieldnames and add them to tags
// will delete any strings/bools that shouldn't be fields
// assumes that any non-numeric values in TagKeys should be displayed as tags
func (p *Parser) switchFieldToTag(tags map[string]string, fields map[string]interface{}) (map[string]string, map[string]interface{}) {
    for name, value := range fields {
        if p.tagFilter == nil {
            continue
        }
        // skip switch statement if tagkey doesn't match fieldname
        if !p.tagFilter.Match(name) {
            continue
        }
        // switch any fields in TagKeys into tags
        switch t := value.(type) {
        case string:
            tags[name] = t
            delete(fields, name)
        case bool:
            tags[name] = strconv.FormatBool(t)
            delete(fields, name)
        case float64:
            tags[name] = strconv.FormatFloat(t, 'f', -1, 64)
            delete(fields, name)
        default:
            p.Log.Errorf("Unrecognized type %T", value)
        }
    }

    // remove any additional string/bool values from fields
    for fk := range fields {
        switch fields[fk].(type) {
        case string, bool:
            if p.stringFilter != nil && p.stringFilter.Match(fk) {
                continue
            }
            delete(fields, fk)
        }
    }
    return tags, fields
}

func (p *Parser) Init() error {
    var err error

    p.stringFilter, err = filter.Compile(p.StringFields)
    if err != nil {
        return fmt.Errorf("compiling string-fields filter failed: %w", err)
    }

    p.tagFilter, err = filter.Compile(p.TagKeys)
    if err != nil {
        return fmt.Errorf("compiling tag-key filter failed: %w", err)
    }

    if p.Timezone != "" {
        loc, err := time.LoadLocation(p.Timezone)
        if err != nil {
            return fmt.Errorf("invalid timezone: %w", err)
        }
        p.location = loc
    }

    return nil
}

func (p *Parser) Parse(buf []byte) ([]telegraf.Metric, error) {
    if p.Query != "" {
        result := gjson.GetBytes(buf, p.Query)
        buf = []byte(result.Raw)
        if !result.IsArray() && !result.IsObject() && result.Type != gjson.Null {
            err := fmt.Errorf("query path must lead to a JSON object, array of objects or null, but lead to: %v", result.Type)
            return nil, err
        }
        if result.Type == gjson.Null {
            return nil, nil
        }
    }

    buf = bytes.TrimSpace(buf)
    buf = bytes.TrimPrefix(buf, utf8BOM)
    if len(buf) == 0 {
        return make([]telegraf.Metric, 0), nil
    }

    var data interface{}
    err := json.Unmarshal(buf, &data)
    if err != nil {
        return nil, err
    }

    timestamp := time.Now().UTC()
    switch v := data.(type) {
    case map[string]interface{}:
        return p.parseObject(v, timestamp)
    case []interface{}:
        return p.parseArray(v, timestamp)
    case nil:
        return nil, nil
    default:
        return nil, ErrWrongType
    }
}

func (p *Parser) ParseLine(line string) (telegraf.Metric, error) {
    metrics, err := p.Parse([]byte(line + "\n"))

    if err != nil {
        return nil, err
    }

    if len(metrics) < 1 {
        return nil, fmt.Errorf("can not parse the line: %s, for data format: json ", line)
    }

    return metrics[0], nil
}

func (p *Parser) SetDefaultTags(tags map[string]string) {
    p.DefaultTags = tags
}

func init() {
    parsers.Add("json",
        func(defaultMetricName string) telegraf.Parser {
            return &Parser{
                MetricName: defaultMetricName,
                Strict:     true,
            }
        })
}
plugins/parsers/json/parser_test.go (new file, 1493 lines)

File diff suppressed because it is too large.