Adding upstream version 1.34.4.
Signed-off-by: Daniel Baumann <daniel@debian.org>
This commit is contained in: parent e393c3af3f, commit 4978089aab
4963 changed files with 677545 additions and 0 deletions
276 plugins/parsers/json_v2/README.md Normal file
@@ -0,0 +1,276 @@

# JSON Parser Version 2 Plugin

This parser takes valid JSON input and turns it into line protocol. The query
syntax supported is [GJSON Path
Syntax](https://github.com/tidwall/gjson/blob/v1.7.5/SYNTAX.md); you can test
your GJSON paths in this playground: [gjson.dev](https://gjson.dev). You can
find multiple examples under the [`testdata`][] folder.

> [!WARNING]
> In the current state of the implementation, the json_v2 parser should be avoided in favor of the [XPath Parser](../xpath), especially when working with arrays.

## Configuration

```toml
[[inputs.file]]
  files = []
  data_format = "json_v2"
  [[inputs.file.json_v2]]
    measurement_name = "" # A string that will become the new measurement name
    measurement_name_path = "" # A string with valid GJSON path syntax, will override measurement_name
    timestamp_path = "" # A string with valid GJSON path syntax to a valid timestamp (single value)
    timestamp_format = "" # A string with a valid timestamp format (see below for possible values)
    timestamp_timezone = "" # A string with a valid timezone (see below for possible values)
    [[inputs.file.json_v2.tag]]
      path = "" # A string with valid GJSON path syntax to a non-array/non-object value
      rename = "new name" # A string with a new name for the tag key
      ## Setting optional to true will suppress errors if the configured Path doesn't match the JSON
      optional = false
    [[inputs.file.json_v2.field]]
      path = "" # A string with valid GJSON path syntax to a non-array/non-object value
      rename = "new name" # A string with a new name for the field key
      type = "int" # A string specifying the type (int,uint,float,string,bool)
      ## Setting optional to true will suppress errors if the configured Path doesn't match the JSON
      optional = false
    [[inputs.file.json_v2.object]]
      path = "" # A string with valid GJSON path syntax, can include arrays and objects

      ## Setting optional to true will suppress errors if the configured Path doesn't match the JSON
      optional = false

      ## Configuration to define what JSON keys should be used as timestamps ##
      timestamp_key = "" # A JSON key (for a nested key, prepend the parent keys with underscores) to a valid timestamp
      timestamp_format = "" # A string with a valid timestamp format (see below for possible values)
      timestamp_timezone = "" # A string with a valid timezone (see below for possible values)

      ### Configuration to define what JSON keys should be included and how (field/tag) ###
      tags = [] # List of JSON keys (for a nested key, prepend the parent keys with underscores) to be a tag instead of a field; keys in this list don't have to be repeated in included_keys
      included_keys = [] # List of JSON keys (for a nested key, prepend the parent keys with underscores) that should be the only ones included in the result
      excluded_keys = [] # List of JSON keys (for a nested key, prepend the parent keys with underscores) that shouldn't be included in the result
      # When a tag/field sub-table is defined, its matches will be the only fields/tags along with any keys defined in the included_keys list.
      # If a resulting value isn't included in the object/array returned by the root object path, it won't be included.
      # You can define as many tag/field sub-tables as you want.
      [[inputs.file.json_v2.object.tag]]
        path = "" # A string with valid GJSON path syntax, can include arrays and objects
        rename = "new name" # A string with a new name for the tag key
      [[inputs.file.json_v2.object.field]]
        path = "" # A string with valid GJSON path syntax, can include arrays and objects
        rename = "new name" # A string with a new name for the field key
        type = "int" # A string specifying the type (int,uint,float,string,bool)

      ### Configuration to modify the resulting line protocol ###
      disable_prepend_keys = false # Set to true to prevent parent keys from being prepended to nested key names
      [inputs.file.json_v2.object.renames] # A map of JSON keys (for a nested key, prepend the parent keys with underscores) with a new name for the tag key
        key = "new name"
      [inputs.file.json_v2.object.fields] # A map of JSON keys (for a nested key, prepend the parent keys with underscores) with a type (int,uint,float,string,bool)
        key = "int"
```

You configure this parser by describing the line protocol you want, defining
the fields and tags from the input. The configuration is divided into config
sub-tables called `field`, `tag`, and `object`. In the example above you can see
all the possible configuration keys you can define for each config table. In the
sections that follow, these configuration keys are described in more detail.

---

### root config options

* **measurement_name (OPTIONAL)**: Will set the measurement name to the provided string.
* **measurement_name_path (OPTIONAL)**: You can define a query with [GJSON Path Syntax](https://github.com/tidwall/gjson/blob/v1.7.5/SYNTAX.md) to set a measurement name from the JSON input. The query must return a single data value or it will use the default measurement name. This takes precedence over `measurement_name`.
* **timestamp_path (OPTIONAL)**: You can define a query with [GJSON Path Syntax](https://github.com/tidwall/gjson/blob/v1.7.5/SYNTAX.md) to set a timestamp from the JSON input. The query must return a single data value or it will default to the current time.
* **timestamp_format (OPTIONAL, but REQUIRED when timestamp_path is defined)**: Must be set to `unix`, `unix_ms`, `unix_us`, `unix_ns`, or
  the Go "reference time" which is defined to be the specific time:
  `Mon Jan 2 15:04:05 MST 2006`
* **timestamp_timezone (OPTIONAL, but REQUIRES timestamp_path)**: This option should be set to a
  [Unix TZ value](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones),
  such as `America/New_York`, to `Local` to utilize the system timezone, or to `UTC`. Defaults to `UTC`.
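
As a minimal sketch of the root options (the file name `example.json` and the JSON keys are illustrative, not part of the plugin), the following pulls both the measurement name and the timestamp out of the input:

```json
{ "name": "device", "time": "1625744197", "value": 42 }
```

```toml
[[inputs.file]]
  files = ["example.json"]
  data_format = "json_v2"
  [[inputs.file.json_v2]]
    measurement_name_path = "name"
    timestamp_path = "time"
    timestamp_format = "unix"
    [[inputs.file.json_v2.field]]
      path = "value"
```

This should produce roughly the following line protocol, with the measurement named `device` and the timestamp taken from the `time` key:

```text
device value=42 1625744197000000000
```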

---

### `field` and `tag` config options

`field` and `tag` represent the elements of [line protocol][lp-ref]. You can use
the `field` and `tag` config tables to gather a single value or an array of
values that all share the same type and name. With this you can add a field or
tag to a line protocol from data stored anywhere in your JSON. If you define the
GJSON path to return a single value, then you will get a single resulting line
protocol that contains the field/tag. If you define the GJSON path to return an
array of values, then each field/tag will be put into a separate line protocol
(you use the `#` character to retrieve JSON arrays; find examples
[here](https://github.com/tidwall/gjson/blob/v1.7.5/SYNTAX.md#arrays)).

Note that objects are handled separately, therefore if you provide a path that
returns an object it will be ignored. You will need to use the `object` config
table to parse objects, because `field` and `tag` don't handle relationships
between data. Each `field` and `tag` you define is handled as a separate data
point.

The notable difference between `field` and `tag` is that `tag` values will
always be type string while `field` can be multiple types. You can define the
type of `field` to be any [type that line protocol supports][types], which are:

* float
* int
* uint
* string
* bool

[lp-ref]: https://docs.influxdata.com/influxdb/v2.0/reference/syntax/line-protocol/

[types]: https://docs.influxdata.com/influxdb/v2.0/reference/syntax/line-protocol/#data-types-and-format

#### **field**

Using this field configuration you can gather non-array/non-object
values. Note this acts as a global field when used with the `object`
configuration: if you gather an array of values using `object`, then the field
gathered will be added to each resulting line protocol without acknowledging its
location in the original JSON. This is defined in TOML as an array table using
double brackets.

* **path (REQUIRED)**: A string with valid GJSON path syntax to a non-array/non-object value
* **rename (OPTIONAL)**: You can define a string value to set the field name. If not defined it will use the trailing word from the provided query.
* **type (OPTIONAL)**: You can define a string value to set the desired type (float, int, uint, string, bool). If not defined it won't enforce a type and will default to using the original type defined in the JSON (bool, float, or string).
* **optional (OPTIONAL)**: Setting optional to true will suppress errors if the configured Path doesn't match the JSON. This should be used with caution because it removes the safety net of verifying the provided path. An example case to use this is with the `inputs.mqtt_consumer` plugin when you are expecting multiple JSON files.

#### **tag**

Using this tag configuration you can gather non-array/non-object values. Note
this acts as a global tag when used with the `object` configuration: if you
gather an array of values using `object`, then the tag gathered will be added to
each resulting line protocol without acknowledging its location in the original
JSON. This is defined in TOML as an array table using double brackets.

* **path (REQUIRED)**: A string with valid GJSON path syntax to a non-array/non-object value
* **rename (OPTIONAL)**: You can define a string value to set the tag name. If not defined it will use the trailing word from the provided query.
* **optional (OPTIONAL)**: Setting optional to true will suppress errors if the configured Path doesn't match the JSON. This should be used with caution because it removes the safety net of verifying the provided path. An example case to use this is with the `inputs.mqtt_consumer` plugin when you are expecting multiple JSON files.

For a small worked example of using `field` and `tag` see the sketch below;
more complete example configs live under the [`testdata`][] folder:
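
A minimal sketch (the file name and JSON keys are illustrative): the `#` query below returns an array, so each element becomes its own line protocol, while the single-valued tag is applied to every one of them.

```json
{ "device": "sensor-1", "readings": [ { "value": 20.5 }, { "value": 21.0 } ] }
```

```toml
[[inputs.file]]
  files = ["example.json"]
  data_format = "json_v2"
  [[inputs.file.json_v2]]
    [[inputs.file.json_v2.tag]]
      path = "device"
    [[inputs.file.json_v2.field]]
      path = "readings.#.value"
```

Expected line protocol (the default measurement name comes from the input plugin, here `file`):

```text
file,device=sensor-1 value=20.5
file,device=sensor-1 value=21
```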

---

### object

With the configuration section `object`, you can gather values from [JSON
objects](https://www.w3schools.com/js/js_json_objects.asp). This is defined in
TOML as an array table using double brackets.

#### The following keys can be set for `object`

* **path (REQUIRED)**: You must define the path query that gathers the object with [GJSON Path Syntax](https://github.com/tidwall/gjson/blob/v1.7.5/SYNTAX.md)
* **optional (OPTIONAL)**: Setting optional to true will suppress errors if the configured Path doesn't match the JSON. This should be used with caution because it removes the safety net of verifying the provided path. An example case to use this is with the `inputs.mqtt_consumer` plugin when you are expecting multiple JSON files.

*Keys to define what JSON keys should be used as timestamps:*

* **timestamp_key (OPTIONAL)**: You can define a JSON key (for a nested key, prepend the parent keys with underscores) for the value to be set as the timestamp from the JSON input.
* **timestamp_format (OPTIONAL, but REQUIRED when timestamp_key is defined)**: Must be set to `unix`, `unix_ms`, `unix_us`, `unix_ns`, or
  the Go "reference time" which is defined to be the specific time:
  `Mon Jan 2 15:04:05 MST 2006`
* **timestamp_timezone (OPTIONAL, but REQUIRES timestamp_key)**: This option should be set to a
  [Unix TZ value](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones),
  such as `America/New_York`, to `Local` to utilize the system timezone, or to `UTC`. Defaults to `UTC`.

*Configuration to define what JSON keys should be included and how (field/tag):*

* **included_keys (OPTIONAL)**: You can define a list of keys that should be the only data included in the line protocol; by default everything is included.
* **excluded_keys (OPTIONAL)**: You can define JSON keys to be excluded from the line protocol; for a nested key, prepend the parent keys with underscores.
* **tags (OPTIONAL)**: You can define JSON keys to be set as tags instead of fields; if you define a key that is an array or object, then all nested values will become tags.
* **field (OPTIONAL, defined in TOML as an array table using double brackets)**: Identical to the [field](#field) table you can define, but with two key differences: the path supports arrays and objects, and it is defined under the object table and therefore adheres to how the JSON is structured. Use this if you want the field/tag to be added as it would be if it were in the included_keys list, but selected with GJSON path syntax.
* **tag (OPTIONAL, defined in TOML as an array table using double brackets)**: Identical to the [tag](#tag) table you can define, but with two key differences: the path supports arrays and objects, and it is defined under the object table and therefore adheres to how the JSON is structured. Use this if you want the field/tag to be added as it would be if it were in the included_keys list, but selected with GJSON path syntax.

*Configuration to modify the resulting line protocol (see the sketch after this list):*

* **disable_prepend_keys (OPTIONAL)**: Set to true to prevent nested data from having its parent key prepended to its key. **NOTE**: duplicate names can overwrite each other when this is enabled.
* **renames (OPTIONAL, defined in TOML as a table using single brackets)**: A table matching a JSON key with the desired name (as opposed to defaulting to the key); for nested results, use names that include the prepended parent keys.
* **fields (OPTIONAL, defined in TOML as a table using single brackets)**: A table matching a JSON key with the desired type (int,uint,float,string,bool); if you define a key that is an array or object, then all nested values will become that type.
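
A minimal sketch of `renames` and `fields` (the file name and JSON keys are illustrative): the string `"21.5"` is coerced to a float and the key `temp` is renamed to `temperature`.

```json
{ "measurements": [ { "room": "kitchen", "temp": "21.5" } ] }
```

```toml
[[inputs.file]]
  files = ["example.json"]
  data_format = "json_v2"
  [[inputs.file.json_v2]]
    [[inputs.file.json_v2.object]]
      path = "measurements"
      tags = ["room"]
      [inputs.file.json_v2.object.renames]
        temp = "temperature"
      [inputs.file.json_v2.object.fields]
        temp = "float"
```

Expected line protocol:

```text
file,room=kitchen temperature=21.5
```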

## Arrays and Objects

The following describes the high-level approach when parsing arrays and objects:

**Array**: Every element in an array is treated as a *separate* line protocol

**Object**: Every key/value in an object is treated as a *single* line protocol

When handling nested arrays and objects, these rules continue to apply as the
parser creates line protocol. When an object has multiple arrays as values, the
arrays will become separate line protocols, each containing only the non-array
values from the object. Below you can see an example of this behavior, with an
input JSON containing an array of book objects that has a nested array of
characters.

Example JSON:

```json
{
    "book": {
        "title": "The Lord Of The Rings",
        "chapters": [
            "A Long-expected Party",
            "The Shadow of the Past"
        ],
        "author": "Tolkien",
        "characters": [
            {
                "name": "Bilbo",
                "species": "hobbit"
            },
            {
                "name": "Frodo",
                "species": "hobbit"
            }
        ],
        "random": [
            1,
            2
        ]
    }
}
```

Example configuration:

```toml
[[inputs.file]]
  files = ["./testdata/multiple_arrays_in_object/input.json"]
  data_format = "json_v2"
  [[inputs.file.json_v2]]
    [[inputs.file.json_v2.object]]
      path = "book"
      tags = ["title"]
      disable_prepend_keys = true
```

Expected line protocol:

```text
file,title=The\ Lord\ Of\ The\ Rings author="Tolkien",chapters="A Long-expected Party"
file,title=The\ Lord\ Of\ The\ Rings author="Tolkien",chapters="The Shadow of the Past"
file,title=The\ Lord\ Of\ The\ Rings author="Tolkien",name="Bilbo",species="hobbit"
file,title=The\ Lord\ Of\ The\ Rings author="Tolkien",name="Frodo",species="hobbit"
file,title=The\ Lord\ Of\ The\ Rings author="Tolkien",random=1
file,title=The\ Lord\ Of\ The\ Rings author="Tolkien",random=2
```

You can find more complicated examples under the folder [`testdata`][].

[`testdata`]: https://github.com/influxdata/telegraf/tree/master/plugins/parsers/json_v2/testdata

## Types

For each field you have the option to define the type. The following rules are
in place for this configuration:

* If a type is explicitly defined, the parser will enforce this type and convert the data to the defined type if possible. If the type can't be converted, then the parser will fail.
* If a type isn't defined, the parser will use the default type defined in the JSON (bool, float, string).

The type values you can set:

* `int`: bools, floats, or strings (with valid numbers) can be converted to an int.
* `uint`: bools, floats, or strings (with valid numbers) can be converted to a uint.
* `string`: any data can be formatted as a string.
* `float`: string values (with valid numbers) or integers can be converted to a float.
* `bool`: the string values "true" or "false" (regardless of capitalization) or the integer values `0` or `1` can be converted to a bool.
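
To make the conversion rules concrete, here is a small sketch (the file name and JSON keys are illustrative) where a numeric string becomes an int and the integer `1` becomes a bool:

```json
{ "count": "42", "enabled": 1 }
```

```toml
[[inputs.file]]
  files = ["example.json"]
  data_format = "json_v2"
  [[inputs.file.json_v2]]
    [[inputs.file.json_v2.field]]
      path = "count"
      type = "int"
    [[inputs.file.json_v2.field]]
      path = "enabled"
      type = "bool"
```

Expected line protocol:

```text
file count=42i,enabled=true
```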
747 plugins/parsers/json_v2/parser.go Normal file
@@ -0,0 +1,747 @@
package json_v2

import (
    "errors"
    "fmt"
    "io"
    "strconv"
    "strings"
    "sync"
    "time"

    "github.com/dimchansky/utfbom"
    "github.com/tidwall/gjson"

    "github.com/influxdata/telegraf"
    "github.com/influxdata/telegraf/internal"
    "github.com/influxdata/telegraf/metric"
    "github.com/influxdata/telegraf/plugins/parsers"
)

// Parser adheres to the parser interface, contains the parser configuration, and data required to parse JSON
type Parser struct {
    Configs           []Config          `toml:"json_v2"`
    DefaultMetricName string            `toml:"-"`
    DefaultTags       map[string]string `toml:"-"`
    Log               telegraf.Logger   `toml:"-"`

    // **** The struct fields below this comment are used for processing individual configs ****

    // measurementName is the name of the current config used in each line protocol
    measurementName string

    // **** Specific for object configuration ****
    // subPathResults contains the results of sub-gjson path expressions provided in fields/tags table within object config
    subPathResults []pathResult
    // iterateObjects dictates if ExpandArray function will handle objects
    iterateObjects bool
    // objectConfig contains the config for an object, some info is needed while iterating over the gjson results
    objectConfig Object
    // parseMutex is here because Parse() is not threadsafe. If it is made threadsafe at some point, then we won't need it anymore.
    parseMutex sync.Mutex
}

type Config struct {
    MeasurementName     string `toml:"measurement_name"`      // OPTIONAL
    MeasurementNamePath string `toml:"measurement_name_path"` // OPTIONAL
    TimestampPath       string `toml:"timestamp_path"`        // OPTIONAL
    TimestampFormat     string `toml:"timestamp_format"`      // OPTIONAL, but REQUIRED when timestamp_path is defined
    TimestampTimezone   string `toml:"timestamp_timezone"`    // OPTIONAL, but REQUIRES timestamp_path

    Fields      []DataSet `toml:"field"`
    Tags        []DataSet `toml:"tag"`
    JSONObjects []Object  `toml:"object"`

    Location *time.Location
}

type DataSet struct {
    Path     string `toml:"path"`     // REQUIRED
    Type     string `toml:"type"`     // OPTIONAL, can't be set for tags, they will always be a string
    Rename   string `toml:"rename"`
    Optional bool   `toml:"optional"` // Will suppress errors if there isn't a match with Path
}

type Object struct {
    Path               string            `toml:"path"`     // REQUIRED
    Optional           bool              `toml:"optional"` // Will suppress errors if there isn't a match with Path
    TimestampKey       string            `toml:"timestamp_key"`
    TimestampFormat    string            `toml:"timestamp_format"`   // OPTIONAL, but REQUIRED when timestamp_key is defined
    TimestampTimezone  string            `toml:"timestamp_timezone"` // OPTIONAL, but REQUIRES timestamp_key
    Renames            map[string]string `toml:"renames"`
    Fields             map[string]string `toml:"fields"`
    Tags               []string          `toml:"tags"`
    IncludedKeys       []string          `toml:"included_keys"`
    ExcludedKeys       []string          `toml:"excluded_keys"`
    DisablePrependKeys bool              `toml:"disable_prepend_keys"`
    FieldPaths         []DataSet         `toml:"field"`
    TagPaths           []DataSet         `toml:"tag"`
}

type pathResult struct {
    result gjson.Result
    tag    bool
    DataSet
}

type metricNode struct {
    ParentIndex int
    OutputName  string
    SetName     string
    Tag         bool
    DesiredType string // Can be "int", "uint", "float", "bool", "string"
    /*
        IncludeCollection is only used when processing objects and is responsible for containing the gjson results
        found by the gjson paths provided in the FieldPaths and TagPaths configs.
    */
    IncludeCollection *pathResult

    Metric telegraf.Metric
    gjson.Result
}

func (p *Parser) Init() error {
    if len(p.Configs) == 0 {
        return errors.New("no configuration provided")
    }
    // Propagate the default metric name to the configs in case it is not set there
    for i, cfg := range p.Configs {
        if cfg.MeasurementName == "" {
            p.Configs[i].MeasurementName = p.DefaultMetricName
        }
        if cfg.TimestampTimezone != "" {
            loc, err := time.LoadLocation(cfg.TimestampTimezone)
            if err != nil {
                return fmt.Errorf("invalid timezone in config %d: %w", i+1, err)
            }
            p.Configs[i].Location = loc
        }
    }
    return nil
}

func (p *Parser) Parse(input []byte) ([]telegraf.Metric, error) {
    // What we've done here is to put the entire former contents of Parse()
    // into parseCriticalPath().
    //
    // As we determine what bits of parseCriticalPath() are and are not
    // threadsafe, we can lift the safe pieces back up into Parse(), and
    // shrink the scope (or scopes, if the critical sections are disjoint)
    // of those pieces that need to be protected with a mutex.
    return p.parseCriticalPath(input)
}

func (p *Parser) parseCriticalPath(input []byte) ([]telegraf.Metric, error) {
    p.parseMutex.Lock()
    defer p.parseMutex.Unlock()

    // Clear intermediate results if left by previous call
    p.subPathResults = nil

    reader := strings.NewReader(string(input))
    body, _ := utfbom.Skip(reader)
    input, err := io.ReadAll(body)
    if err != nil {
        return nil, fmt.Errorf("unable to read body after BOM removal: %w", err)
    }

    // Only valid JSON is supported
    if !gjson.Valid(string(input)) {
        return nil, fmt.Errorf("invalid JSON provided, unable to parse: %s", string(input))
    }

    var metrics []telegraf.Metric
    // timestamp defaults to current time
    now := time.Now()

    for _, c := range p.Configs {
        // Measurement name can either be hardcoded, or parsed from the JSON using a GJSON path expression
        p.measurementName = c.MeasurementName
        if c.MeasurementNamePath != "" {
            result := gjson.GetBytes(input, c.MeasurementNamePath)
            if !result.IsArray() && !result.IsObject() {
                p.measurementName = result.String()
            }
        }

        // timestamp can be parsed from the JSON using a GJSON path expression
        timestamp := now
        if c.TimestampPath != "" {
            result := gjson.GetBytes(input, c.TimestampPath)

            if result.Type == gjson.Null {
                p.Log.Debugf("Message: %s", input)
                return nil, fmt.Errorf("the timestamp path %q returned NULL", c.TimestampPath)
            }
            if !result.IsArray() && !result.IsObject() {
                if c.TimestampFormat == "" {
                    err := errors.New("use of 'timestamp_path' requires 'timestamp_format'")
                    return nil, err
                }

                var err error
                timestamp, err = internal.ParseTimestamp(c.TimestampFormat, result.String(), c.Location)
                if err != nil {
                    return nil, fmt.Errorf("unable to parse timestamp %q: %w", result.String(), err)
                }
            }
        }

        fields, err := p.processMetric(input, c.Fields, false, timestamp)
        if err != nil {
            return nil, err
        }

        tags, err := p.processMetric(input, c.Tags, true, timestamp)
        if err != nil {
            return nil, err
        }

        objects, err := p.processObjects(input, c.JSONObjects, timestamp)
        if err != nil {
            return nil, err
        }

        cmetrics := cartesianProduct(tags, fields)

        if len(objects) != 0 && len(cmetrics) != 0 {
            cmetrics = cartesianProduct(objects, cmetrics)
        } else {
            cmetrics = append(cmetrics, objects...)
        }

        metrics = append(metrics, cmetrics...)
    }

    for k, v := range p.DefaultTags {
        for _, t := range metrics {
            t.AddTag(k, v)
        }
    }

    return metrics, nil
}

// processMetric will iterate over all 'field' or 'tag' configs and create metrics for each
// A field/tag can either be a single value or an array of values, each resulting in its own metric
// For multiple configs, a set of metrics is created from the cartesian product of each separate config
func (p *Parser) processMetric(input []byte, data []DataSet, tag bool, timestamp time.Time) ([]telegraf.Metric, error) {
    if len(data) == 0 {
        return nil, nil
    }

    p.iterateObjects = false
    metrics := make([][]telegraf.Metric, 0, len(data))
    for _, c := range data {
        if c.Path == "" {
            return nil, errors.New("the GJSON path is required")
        }
        result := gjson.GetBytes(input, c.Path)
        if err := p.checkResult(result, c.Path); err != nil {
            if c.Optional {
                continue
            }
            return nil, err
        }

        if result.IsObject() {
            p.Log.Debugf("Found object in the path %q, ignoring it please use 'object' to gather metrics from objects", c.Path)
            continue
        }

        setName := c.Rename
        // Default to the last path word, should be the upper key name
        if setName == "" {
            s := strings.Split(c.Path, ".")
            setName = s[len(s)-1]
        }
        setName = strings.ReplaceAll(setName, " ", "_")

        mNode := metricNode{
            OutputName:  setName,
            SetName:     setName,
            DesiredType: c.Type,
            Tag:         tag,
            Metric: metric.New(
                p.measurementName,
                make(map[string]string),
                make(map[string]interface{}),
                timestamp,
            ),
            Result:      result,
            ParentIndex: result.Index,
        }

        // Expand all arrays and nested arrays into separate metrics
        nodes, err := p.expandArray(mNode, timestamp)
        if err != nil {
            return nil, err
        }

        metrics = append(metrics, nodes)
    }

    for i := 1; i < len(metrics); i++ {
        metrics[i] = cartesianProduct(metrics[i-1], metrics[i])
    }

    if len(metrics) == 0 {
        return nil, nil
    }

    return metrics[len(metrics)-1], nil
}

func cartesianProduct(a, b []telegraf.Metric) []telegraf.Metric {
    if len(a) == 0 {
        return b
    }
    if len(b) == 0 {
        return a
    }
    p := make([]telegraf.Metric, 0, len(a)*len(b))
    for _, a := range a {
        for _, b := range b {
            m := a.Copy()
            mergeMetric(b, m)
            p = append(p, m)
        }
    }

    return p
}

func mergeMetric(a, m telegraf.Metric) {
    for _, f := range a.FieldList() {
        m.AddField(f.Key, f.Value)
    }
    for _, t := range a.TagList() {
        m.AddTag(t.Key, t.Value)
    }
}

// expandArray will recursively create a new metricNode for each element in a JSON array or single value
func (p *Parser) expandArray(result metricNode, timestamp time.Time) ([]telegraf.Metric, error) {
    var results []telegraf.Metric

    if result.IsObject() {
        if !p.iterateObjects {
            p.Log.Debugf("Found object in query ignoring it please use 'object' to gather metrics from objects")
            return results, nil
        }
        r, err := p.combineObject(result, timestamp)
        if err != nil {
            return nil, err
        }
        results = append(results, r...)
        return results, nil
    }

    if result.IsArray() {
        if result.IncludeCollection == nil && (len(p.objectConfig.FieldPaths) > 0 || len(p.objectConfig.TagPaths) > 0) {
            result.IncludeCollection = p.existsInpathResults(result.Index)
        }
        result.ForEach(func(_, val gjson.Result) bool {
            m := metric.New(
                p.measurementName,
                make(map[string]string),
                make(map[string]interface{}),
                timestamp,
            )
            if val.IsObject() {
                n := result
                n.Metric = m
                n.Result = val
                n.Index = val.Index - result.Index
                n.ParentIndex = n.Index + result.ParentIndex
                r, err := p.combineObject(n, timestamp)
                if err != nil {
                    p.Log.Error(err)
                    return false
                }

                results = append(results, r...)
                if len(results) != 0 {
                    for _, newResult := range results {
                        mergeMetric(result.Metric, newResult)
                    }
                }
                return true
            }

            mergeMetric(result.Metric, m)
            n := result
            n.Metric = m
            n.Result = val
            n.Index = val.Index - result.Index
            n.ParentIndex = n.Index + result.ParentIndex
            r, err := p.expandArray(n, timestamp)
            if err != nil {
                p.Log.Error(err)
                return false
            }
            results = append(results, r...)
            return true
        })
    } else {
        if p.objectConfig.TimestampKey != "" && result.SetName == p.objectConfig.TimestampKey {
            if p.objectConfig.TimestampFormat == "" {
                err := errors.New("use of 'timestamp_key' requires 'timestamp_format'")
                return nil, err
            }
            var loc *time.Location
            if p.objectConfig.TimestampTimezone != "" {
                var err error
                loc, err = time.LoadLocation(p.objectConfig.TimestampTimezone)
                if err != nil {
                    return nil, fmt.Errorf("invalid timezone: %w", err)
                }
            }
            timestamp, err := internal.ParseTimestamp(p.objectConfig.TimestampFormat, result.String(), loc)
            if err != nil {
                return nil, err
            }
            result.Metric.SetTime(timestamp)
        } else {
            switch result.Value().(type) {
            case nil: // Ignore JSON values that are set as null
            default:
                outputName := result.OutputName
                desiredType := result.DesiredType

                if len(p.objectConfig.FieldPaths) > 0 || len(p.objectConfig.TagPaths) > 0 {
                    var pathResult *pathResult
                    // When IncludeCollection isn't nil, that means the current result is included in the collection.
                    if result.IncludeCollection != nil {
                        pathResult = result.IncludeCollection
                    } else {
                        // Verify that the result should be included based on the results of fieldpaths and tag paths
                        pathResult = p.existsInpathResults(result.ParentIndex)
                    }
                    if pathResult == nil {
                        return results, nil
                    }
                    if pathResult.tag {
                        result.Tag = true
                    }
                    if !pathResult.tag {
                        desiredType = pathResult.Type
                    }
                    if pathResult.Rename != "" {
                        outputName = pathResult.Rename
                    }
                }

                if result.Tag {
                    desiredType = "string"
                }
                v, err := convertType(result.Result, desiredType, result.SetName)
                if err != nil {
                    return nil, err
                }
                if result.Tag {
                    result.Metric.AddTag(outputName, v.(string))
                } else {
                    result.Metric.AddField(outputName, v)
                }
            }
        }

        results = append(results, result.Metric)
    }

    return results, nil
}

func (p *Parser) existsInpathResults(index int) *pathResult {
    for _, f := range p.subPathResults {
        if f.result.Index == index {
            return &f
        }

        // Indexes will be populated with all the elements that match on a `#(...)#` query
        for _, i := range f.result.Indexes {
            if i == index {
                return &f
            }
        }
    }
    return nil
}

// processObjects will iterate over all 'object' configs and create metrics for each
func (p *Parser) processObjects(input []byte, objects []Object, timestamp time.Time) ([]telegraf.Metric, error) {
    p.iterateObjects = true
    var t []telegraf.Metric
    for _, c := range objects {
        p.objectConfig = c

        if c.Path == "" {
            return nil, errors.New("the GJSON path is required")
        }

        result := gjson.GetBytes(input, c.Path)
        if err := p.checkResult(result, c.Path); err != nil {
            if c.Optional {
                continue
            }
            return nil, err
        }

        scopedJSON := []byte(result.Raw)
        for _, f := range c.FieldPaths {
            var r pathResult
            r.result = gjson.GetBytes(scopedJSON, f.Path)
            if err := p.checkResult(r.result, f.Path); err != nil {
                if f.Optional {
                    continue
                }
                return nil, err
            }
            r.DataSet = f
            p.subPathResults = append(p.subPathResults, r)
        }

        for _, f := range c.TagPaths {
            var r pathResult
            r.result = gjson.GetBytes(scopedJSON, f.Path)
            if err := p.checkResult(r.result, f.Path); err != nil {
                if f.Optional {
                    continue
                }
                return nil, err
            }
            r.DataSet = f
            r.tag = true
            p.subPathResults = append(p.subPathResults, r)
        }

        rootObject := metricNode{
            Metric: metric.New(
                p.measurementName,
                make(map[string]string),
                make(map[string]interface{}),
                timestamp,
            ),
            Result:      result,
            ParentIndex: 0,
        }

        metrics, err := p.expandArray(rootObject, timestamp)
        if err != nil {
            return nil, err
        }
        t = append(t, metrics...)
    }

    return t, nil
}

// combineObject will add all fields/tags to a single metric
// If the object has multiple arrays as elements it won't combine those; they will remain separate metrics
func (p *Parser) combineObject(result metricNode, timestamp time.Time) ([]telegraf.Metric, error) {
    var results []telegraf.Metric
    if result.IsArray() || result.IsObject() {
        var err error
        result.ForEach(func(key, val gjson.Result) bool {
            // Determine if field/tag set name is configured
            var setName string
            if result.SetName != "" {
                setName = result.SetName + "_" + strings.ReplaceAll(key.String(), " ", "_")
            } else {
                setName = strings.ReplaceAll(key.String(), " ", "_")
            }

            if p.isExcluded(setName) || !p.isIncluded(setName, val) {
                return true
            }

            var outputName string
            if p.objectConfig.DisablePrependKeys {
                outputName = strings.ReplaceAll(key.String(), " ", "_")
            } else {
                outputName = setName
            }
            for k, n := range p.objectConfig.Renames {
                if k == setName {
                    outputName = n
                    break
                }
            }

            arrayNode := result
            arrayNode.ParentIndex += val.Index
            arrayNode.OutputName = outputName
            arrayNode.SetName = setName
            arrayNode.Result = val

            for k, t := range p.objectConfig.Fields {
                if setName == k {
                    arrayNode.DesiredType = t
                    break
                }
            }

            tag := false
            for _, t := range p.objectConfig.Tags {
                if setName == t {
                    tag = true
                    break
                }
            }

            arrayNode.Tag = tag

            if val.IsObject() {
                results, err = p.combineObject(arrayNode, timestamp)
                if err != nil {
                    p.Log.Error(err)
                    return false
                }
            } else {
                arrayNode.Index -= result.Index
                arrayNode.ParentIndex -= result.Index
                r, err := p.expandArray(arrayNode, timestamp)
                if err != nil {
                    p.Log.Error(err)
                    return false
                }
                results = cartesianProduct(r, results)
            }

            return true
        })

        if err != nil {
            return nil, err
        }
    }
    return results, nil
}

func (p *Parser) isIncluded(key string, val gjson.Result) bool {
    if len(p.objectConfig.IncludedKeys) == 0 {
        return true
    }
    // automatically adds tags to included_keys so it does NOT have to be repeated in the config
    allKeys := append(p.objectConfig.IncludedKeys, p.objectConfig.Tags...)
    for _, i := range allKeys {
        if i == key {
            return true
        }
        if val.IsArray() || val.IsObject() {
            // Check if the included key is a sub element
            if strings.HasPrefix(i, key) {
                return true
            }
        }
    }
    return false
}

func (p *Parser) isExcluded(key string) bool {
    for _, i := range p.objectConfig.ExcludedKeys {
        if i == key {
            return true
        }
    }
    return false
}

func (*Parser) ParseLine(string) (telegraf.Metric, error) {
    return nil, errors.New("parsing line is not supported by JSON format")
}

func (p *Parser) SetDefaultTags(tags map[string]string) {
    p.DefaultTags = tags
}

// convertType will convert the value parsed from the input JSON to the specified type in the config
func convertType(input gjson.Result, desiredType, name string) (interface{}, error) {
    switch inputType := input.Value().(type) {
    case string:
        switch desiredType {
        case "uint":
            r, err := strconv.ParseUint(inputType, 10, 64)
            if err != nil {
                return nil, fmt.Errorf("unable to convert field %q to type uint: %w", name, err)
            }
            return r, nil
        case "int":
            r, err := strconv.ParseInt(inputType, 10, 64)
            if err != nil {
                return nil, fmt.Errorf("unable to convert field %q to type int: %w", name, err)
            }
            return r, nil
        case "float":
            r, err := strconv.ParseFloat(inputType, 64)
            if err != nil {
                return nil, fmt.Errorf("unable to convert field %q to type float: %w", name, err)
            }
            return r, nil
        case "bool":
            r, err := strconv.ParseBool(inputType)
            if err != nil {
                return nil, fmt.Errorf("unable to convert field %q to type bool: %w", name, err)
            }
            return r, nil
        }
    case bool:
        switch desiredType {
        case "string":
            return strconv.FormatBool(inputType), nil
        case "int":
            if inputType {
                return int64(1), nil
            }

            return int64(0), nil
        case "uint":
            if inputType {
                return uint64(1), nil
            }

            return uint64(0), nil
        }
    case float64:
        switch desiredType {
        case "string":
            return fmt.Sprint(inputType), nil
        case "int":
            return input.Int(), nil
        case "uint":
            return input.Uint(), nil
        case "bool":
            if inputType == 0 {
                return false, nil
            }
            if inputType == 1 {
                return true, nil
            }
            return nil, fmt.Errorf("unable to convert field %q to type bool", name)
        }
    default:
        return nil, fmt.Errorf("unknown format '%T' for field %q", inputType, name)
    }

    return input.Value(), nil
}

// checkResult checks if the gjson result exists and returns an error if it does not
func (p *Parser) checkResult(result gjson.Result, path string) error {
    if !result.Exists() {
        p.Log.Debugf("the path %q doesn't exist", path)
        return fmt.Errorf("the path %q doesn't exist", path)
    }

    return nil
}

func init() {
    // Register all variants
    parsers.Add("json_v2",
        func(defaultMetricName string) telegraf.Parser {
            return &Parser{DefaultMetricName: defaultMetricName}
        },
    )
}
168 plugins/parsers/json_v2/parser_test.go Normal file
@@ -0,0 +1,168 @@
package json_v2_test

import (
    "os"
    "path/filepath"
    "sort"
    "strings"
    "testing"
    "time"

    "github.com/stretchr/testify/require"

    "github.com/influxdata/telegraf"
    "github.com/influxdata/telegraf/config"
    "github.com/influxdata/telegraf/plugins/inputs"
    "github.com/influxdata/telegraf/plugins/inputs/file"
    "github.com/influxdata/telegraf/plugins/parsers/influx"
    "github.com/influxdata/telegraf/plugins/parsers/json_v2"
    "github.com/influxdata/telegraf/testutil"
)

func TestMultipleConfigs(t *testing.T) {
    // Get all directories in testdata
    folders, err := os.ReadDir("testdata")
    require.NoError(t, err)
    // Make sure testdata contains data
    require.NotEmpty(t, folders)

    // Set up the influx parser for parsing the expected metrics
    parser := &influx.Parser{}
    require.NoError(t, parser.Init())

    inputs.Add("file", func() telegraf.Input {
        return &file.File{}
    })

    for _, f := range folders {
        // Only use directories as those contain test-cases
        if !f.IsDir() {
            continue
        }
        testdataPath := filepath.Join("testdata", f.Name())
        configFilename := filepath.Join(testdataPath, "telegraf.conf")
        expectedFilename := filepath.Join(testdataPath, "expected.out")
        expectedErrorFilename := filepath.Join(testdataPath, "expected.err")

        t.Run(f.Name(), func(t *testing.T) {
            // Read the expected output
            expected, err := testutil.ParseMetricsFromFile(expectedFilename, parser)
            require.NoError(t, err)

            // Read the expected errors if any
            var expectedErrors []string
            if _, err := os.Stat(expectedErrorFilename); err == nil {
                var err error
                expectedErrors, err = testutil.ParseLinesFromFile(expectedErrorFilename)
                require.NoError(t, err)
                require.NotEmpty(t, expectedErrors)
            }

            // Configure the plugin
            cfg := config.NewConfig()
            require.NoError(t, cfg.LoadConfig(configFilename))

            // Gather the metrics from the configured input file
            var acc testutil.Accumulator
            var actualErrorMsgs []string
            for _, input := range cfg.Inputs {
                require.NoError(t, input.Init())
                if err := input.Gather(&acc); err != nil {
                    actualErrorMsgs = append(actualErrorMsgs, err.Error())
                }
            }

            // If the test has expected error(s) then compare them
            if len(expectedErrors) > 0 {
                sort.Strings(actualErrorMsgs)
                sort.Strings(expectedErrors)
                for i, msg := range expectedErrors {
                    require.Contains(t, actualErrorMsgs[i], msg)
                }
            } else {
                require.Empty(t, actualErrorMsgs)
            }

            // Process expected metrics and compare with resulting metrics
            actual := acc.GetTelegrafMetrics()
            testutil.RequireMetricsEqual(t, expected, actual, testutil.IgnoreTime())

            // Folders with a "timestamp" prefix will also check for matching timestamps to make sure they are parsed correctly.
            // The milliseconds weren't matching, which seemed like a rounding difference with the influx parser,
            // so compare each metric's time separately and ignore sub-second precision.
            if strings.HasPrefix(f.Name(), "timestamp") {
                require.Len(t, actual, len(expected))
                for i, m := range actual {
                    require.Equal(t, expected[i].Time().Truncate(time.Second), m.Time().Truncate(time.Second))
                }
            }
        })
    }
}

func TestParserEmptyConfig(t *testing.T) {
    plugin := &json_v2.Parser{}
    require.ErrorContains(t, plugin.Init(), "no configuration provided")
}

func BenchmarkParsingSequential(b *testing.B) {
    inputFilename := filepath.Join("testdata", "benchmark", "input.json")

    // Configure the plugin
    plugin := &json_v2.Parser{
        Configs: []json_v2.Config{
            {
                MeasurementName: "benchmark",
                JSONObjects: []json_v2.Object{
                    {
                        Path:               "metrics",
                        DisablePrependKeys: true,
                    },
                },
            },
        },
    }
    require.NoError(b, plugin.Init())

    // Read the input data
    input, err := os.ReadFile(inputFilename)
    require.NoError(b, err)

    // Do the benchmarking
    for n := 0; n < b.N; n++ {
        //nolint:errcheck // Benchmarking so skip the error check to avoid the unnecessary operations
        plugin.Parse(input)
    }
}

func BenchmarkParsingParallel(b *testing.B) {
    inputFilename := filepath.Join("testdata", "benchmark", "input.json")

    // Configure the plugin
    plugin := &json_v2.Parser{
        Configs: []json_v2.Config{
            {
                MeasurementName: "benchmark",
                JSONObjects: []json_v2.Object{
                    {
                        Path:               "metrics",
                        DisablePrependKeys: true,
                    },
                },
            },
        },
    }
    require.NoError(b, plugin.Init())

    // Read the input data
    input, err := os.ReadFile(inputFilename)
    require.NoError(b, err)

    // Do the benchmarking
    b.RunParallel(func(p *testing.PB) {
        for p.Next() {
            //nolint:errcheck // Benchmarking so skip the error check to avoid the unnecessary operations
            plugin.Parse(input)
        }
    })
}
1 plugins/parsers/json_v2/testdata/10670/expected.out vendored Normal file
@@ -0,0 +1 @@
weight,customer_name=Customer,imei=123,serial_number=AX00 weight=289.799
10 plugins/parsers/json_v2/testdata/10670/input.json vendored Normal file
@@ -0,0 +1,10 @@
{
    "weight_ROWKEY": "123",
    "weight_serialNumber": "AX00",
    "weight_createdAt": 1644708158939,
    "weight_weight": 289.799,
    "sensor_imei": "123",
    "sensor_distributor_name": null,
    "sensor_customer_name": "Customer",
    "sensor_dist_name": null
}
25 plugins/parsers/json_v2/testdata/10670/telegraf.conf vendored Normal file
@@ -0,0 +1,25 @@
# Example taken from: https://github.com/influxdata/telegraf/issues/5940

[[inputs.file]]
  files = ["./testdata/10670/input.json"]
  data_format = "json_v2"
  [[inputs.file.json_v2]]
    measurement_name = "weight"
    timestamp_format = "unix_ms"
    timestamp_path = "weight_createdAt"
    [[inputs.file.json_v2.field]]
      path = "weight_weight"
      rename = "weight"
      type = "float"
    [[inputs.file.json_v2.tag]]
      path = "weight_serialNumber"
      rename = "serial_number"
    [[inputs.file.json_v2.tag]]
      path = "weight_ROWKEY"
      rename = "imei"
    [[inputs.file.json_v2.tag]]
      path = "sensor_customer_name"
      rename = "customer_name"
    [[inputs.file.json_v2.tag]]
      path = "sensor_distributor_name"
      rename = "distributor_name"
2 plugins/parsers/json_v2/testdata/array_of_objects/expected.out vendored Normal file
@@ -0,0 +1,2 @@
file properties_mag=5.17
file properties_mag=6.2
14 plugins/parsers/json_v2/testdata/array_of_objects/input.json vendored Normal file
@@ -0,0 +1,14 @@
{
    "features": [
        {
            "properties": {
                "mag": 5.17
            }
        },
        {
            "properties": {
                "mag": 6.2
            }
        }
    ]
}
9 plugins/parsers/json_v2/testdata/array_of_objects/telegraf.conf vendored Normal file
@@ -0,0 +1,9 @@
# Example taken from: https://github.com/influxdata/telegraf/issues/5940

[[inputs.file]]
  files = ["./testdata/array_of_objects/input.json"]
  data_format = "json_v2"
  [[inputs.file.json_v2]]
    [[inputs.file.json_v2.object]]
      path = "features"
34 plugins/parsers/json_v2/testdata/benchmark/benchmark.json vendored Normal file
@@ -0,0 +1,34 @@
{
    "metrics": [
        {
            "name": "impression",
            "timestamp": 1653643420,
            "fields": {
                "count_sum": 5
            },
            "tags": {
                "key": "12345",
                "flagname": "F5",
                "host": "1cbbb3796fc2",
                "platform": "Java",
                "sdkver": "4.9.1",
                "value": "false"
            }
        },
        {
            "name": "expression",
            "timestamp": 1653646789,
            "fields": {
                "count_sum": 42
            },
            "tags": {
                "key": "67890",
                "flagname": "E42",
                "host": "klaus",
                "platform": "Golang",
                "sdkver": "1.18.3",
                "value": "true"
            }
        }
    ]
}
2 plugins/parsers/json_v2/testdata/benchmark/expected.out vendored Normal file
@@ -0,0 +1,2 @@
benchmark count_sum=5,flagname="F5",host="1cbbb3796fc2",key="12345",name="impression",platform="Java",sdkver="4.9.1",timestamp=1653643420,value="false" 1697730835574029794
benchmark count_sum=42,flagname="E42",host="klaus",key="67890",name="expression",platform="Golang",sdkver="1.18.3",timestamp=1653646789,value="true" 1697730835574029794
34 plugins/parsers/json_v2/testdata/benchmark/input.json vendored Normal file
@@ -0,0 +1,34 @@
{
    "metrics": [
        {
            "name": "impression",
            "timestamp": 1653643420,
            "fields": {
                "count_sum": 5
            },
            "tags": {
                "key": "12345",
                "flagname": "F5",
                "host": "1cbbb3796fc2",
                "platform": "Java",
                "sdkver": "4.9.1",
                "value": "false"
            }
        },
        {
            "name": "expression",
            "timestamp": 1653646789,
            "fields": {
                "count_sum": 42
            },
            "tags": {
                "key": "67890",
                "flagname": "E42",
                "host": "klaus",
                "platform": "Golang",
                "sdkver": "1.18.3",
                "value": "true"
            }
        }
    ]
}
9 plugins/parsers/json_v2/testdata/benchmark/telegraf.conf vendored Normal file
@@ -0,0 +1,9 @@
[[inputs.file]]
  files = ["./testdata/benchmark/input.json"]
  data_format = "json_v2"

  [[inputs.file.json_v2]]
    measurement_name = "benchmark"
    [[inputs.file.json_v2.object]]
      path = 'metrics'
      disable_prepend_keys = true
3 plugins/parsers/json_v2/testdata/complex_nesting/expected.out vendored Normal file
@@ -0,0 +1,3 @@
file,properties_place=Antelope\ Valley\,\ CA geometry_coordinates=-119.4998333,geometry_type="Point",id="nc73584926",properties_mag=6,properties_updated=1.626277167263e+12,type="Feature"
file,properties_place=Antelope\ Valley\,\ CA geometry_coordinates=38.5075,geometry_type="Point",id="nc73584926",properties_mag=6,properties_updated=1.626277167263e+12,type="Feature"
file,properties_place=Antelope\ Valley\,\ CA geometry_coordinates=7.45,geometry_type="Point",id="nc73584926",properties_mag=6,properties_updated=1.626277167263e+12,type="Feature"
31 plugins/parsers/json_v2/testdata/complex_nesting/input.json vendored Normal file
@@ -0,0 +1,31 @@
{
    "type": "FeatureCollection",
    "metadata": {
        "generated": 1626285886000,
        "url": "https://earthquake.usgs.gov/earthquakes/feed/v1.0/summary/significant_week.geojson",
        "title": "USGS Significant Earthquakes, Past Week",
        "status": 200,
        "api": "1.10.3",
        "count": 1
    },
    "features": [
        {
            "type": "Feature",
            "properties": {
                "mag": 6,
                "place": "Antelope Valley, CA",
                "time": 1625784588110,
                "updated": 1626277167263
            },
            "geometry": {
                "type": "Point",
                "coordinates": [
                    -119.4998333,
                    38.5075,
                    7.45
                ]
            },
            "id": "nc73584926"
        }
    ]
}
9 plugins/parsers/json_v2/testdata/complex_nesting/telegraf.conf vendored Normal file
@@ -0,0 +1,9 @@
[[inputs.file]]
  files = ["./testdata/complex_nesting/input.json"]
  data_format = "json_v2"
  [[inputs.file.json_v2]]
    [[inputs.file.json_v2.object]]
      path = "features"
      timestamp_key = "properties_time"
      timestamp_format = "unix_ms"
      tags = ["properties_place"]
2 plugins/parsers/json_v2/testdata/fields_and_tags/expected.out vendored Normal file
@@ -0,0 +1,2 @@
file,status=200 duration=2i,json_duration=100
file,status=200 duration=2i,json_duration=60
46 plugins/parsers/json_v2/testdata/fields_and_tags/input.json vendored Normal file
@@ -0,0 +1,46 @@
{
    "message": "abc",
    "fields": {
        "status": 200,
        "key": 1,
        "json": [
            {
                "duration": 100,
                "code": 1,
                "label": 2,
                "line": 3,
                "many": 4,
                "more": 5,
                "numerical": 6,
                "fields": 7,
                "nest": {
                    "label": 2,
                    "line": 3,
                    "many": 4,
                    "more": 5,
                    "numerical": 4,
                    "fields": 7
                }
            },
            {
                "duration": 60,
                "code": 1,
                "label": 2,
                "line": 3,
                "many": 4,
                "more": 5,
                "numerical": 6,
                "fields": 7,
                "nest": {
                    "label": 2,
                    "line": 3,
                    "many": 4,
                    "more": 5,
                    "numerical": 6,
                    "fields": 7
                }
            }
        ],
        "duration": 2
    }
}
14 plugins/parsers/json_v2/testdata/fields_and_tags/telegraf.conf vendored Normal file
@@ -0,0 +1,14 @@
# Example taken from: https://github.com/influxdata/telegraf/issues/1363

[[inputs.file]]
  files = ["./testdata/fields_and_tags/input.json"]
  data_format = "json_v2"
  [[inputs.file.json_v2]]
    [[inputs.file.json_v2.tag]]
      path = "fields.status"
    [[inputs.file.json_v2.field]]
      path = "fields.json.#.duration"
      rename = "json_duration"
    [[inputs.file.json_v2.field]]
      path = "fields.duration"
      type = "int"
5
plugins/parsers/json_v2/testdata/fields_and_tags_complex/expected.out
vendored
Normal file
@@ -0,0 +1,5 @@
bart_json_v2,name=Powell\ St. minutes=9i
bart_json_v2,name=Powell\ St. minutes=40i
bart_json_v2,name=Powell\ St. minutes=70i
bart_json_v2,name=Powell\ St. minutes=12i
bart_json_v2,name=Powell\ St. minutes=42i
87
plugins/parsers/json_v2/testdata/fields_and_tags_complex/input.json
vendored
Normal file
@@ -0,0 +1,87 @@
{
    "?xml": {
        "@version": "1.0",
        "@encoding": "utf-8"
    },
    "root": {
        "@id": "1",
        "uri": {
            "#cdata-section": "http://api.bart.gov/api/etd.aspx?cmd=etd&orig=POWL&json=y"
        },
        "date": "06/03/2021",
        "time": "09:46:01 AM PDT",
        "station": [
            {
                "name": "Powell St.",
                "abbr": "POWL",
                "etd": [
                    {
                        "destination": "Antioch",
                        "abbreviation": "ANTC",
                        "limited": "0",
                        "estimate": [
                            {
                                "minutes": "9",
                                "platform": "2",
                                "direction": "North",
                                "length": "10",
                                "color": "YELLOW",
                                "hexcolor": "#ffff33",
                                "bikeflag": "1",
                                "delay": "0"
                            },
                            {
                                "minutes": "40",
                                "platform": "2",
                                "direction": "North",
                                "length": "10",
                                "color": "YELLOW",
                                "hexcolor": "#ffff33",
                                "bikeflag": "1",
                                "delay": "0"
                            },
                            {
                                "minutes": "70",
                                "platform": "2",
                                "direction": "North",
                                "length": "10",
                                "color": "YELLOW",
                                "hexcolor": "#ffff33",
                                "bikeflag": "1",
                                "delay": "0"
                            }
                        ]
                    },
                    {
                        "destination": "Berryessa",
                        "abbreviation": "BERY",
                        "limited": "0",
                        "estimate": [
                            {
                                "minutes": "12",
                                "platform": "2",
                                "direction": "North",
                                "length": "10",
                                "color": "GREEN",
                                "hexcolor": "#339933",
                                "bikeflag": "1",
                                "delay": "0"
                            },
                            {
                                "minutes": "42",
                                "platform": "2",
                                "direction": "North",
                                "length": "10",
                                "color": "GREEN",
                                "hexcolor": "#339933",
                                "bikeflag": "1",
                                "delay": "0"
                            }
                        ]
                    }
                ]
            }
        ],
        "message": ""
    }
}
10
plugins/parsers/json_v2/testdata/fields_and_tags_complex/telegraf.conf
vendored
Normal file
@@ -0,0 +1,10 @@
[[inputs.file]]
  files = ["./testdata/fields_and_tags_complex/input.json"]
  data_format = "json_v2"
  [[inputs.file.json_v2]]
    measurement_name = "bart_json_v2"
    [[inputs.file.json_v2.tag]]
      path = "root.station.#.name"
    [[inputs.file.json_v2.field]]
      path = "root.station.#.etd.#.estimate.#.minutes"
      type = "int"
3
plugins/parsers/json_v2/testdata/large_numbers/expected.out
vendored
Normal file
@@ -0,0 +1,3 @@
file large=4294967296i,larger=9007199254740991i,largest=9223372036854775807i
file large=9007199254740991u,larger=9223372036854775807u,largest=18446744073709551615u
file large=4294967296,larger=4.294967296663e+09,largest=9007199254740991
17
plugins/parsers/json_v2/testdata/large_numbers/input.json
vendored
Normal file
@@ -0,0 +1,17 @@
{
    "int": {
        "large": 4294967296,
        "larger": 9007199254740991,
        "largest": 9223372036854775807
    },
    "uint": {
        "large": 9007199254740991,
        "larger": 9223372036854775807,
        "largest": 18446744073709551615
    },
    "float": {
        "large": 4294967296,
        "larger": 4.294967296663e+09,
        "largest": 9007199254740991
    }
}
22
plugins/parsers/json_v2/testdata/large_numbers/telegraf.conf
vendored
Normal file
@@ -0,0 +1,22 @@
[[inputs.file]]
  files = ["./testdata/large_numbers/input.json"]
  data_format = "json_v2"
  [[inputs.file.json_v2]]
    [[inputs.file.json_v2.object]]
      path = "int"
      [inputs.file.json_v2.object.fields]
        large = "int"
        larger = "int"
        largest = "int"
    [[inputs.file.json_v2.object]]
      path = "uint"
      [inputs.file.json_v2.object.fields]
        large = "uint"
        larger = "uint"
        largest = "uint"
    [[inputs.file.json_v2.object]]
      path = "float"
      [inputs.file.json_v2.object.fields]
        large = "float"
        larger = "float"
        largest = "float"
1
plugins/parsers/json_v2/testdata/measurement_name_int/expected.out
vendored
Normal file
@@ -0,0 +1 @@
32 label="Basic"
19
plugins/parsers/json_v2/testdata/measurement_name_int/input.json
vendored
Normal file
@@ -0,0 +1,19 @@
{
    "value_id": "52-32-1-0",
    "node_id": 52,
    "class_id": 32,
    "type": "byte",
    "genre": "basic",
    "instance": 1,
    "index": 0,
    "label": "Basic",
    "units": "",
    "help": "Basic status of the node",
    "read_only": false,
    "write_only": false,
    "min": 0,
    "max": 255,
    "is_polled": false,
    "value": 0,
    "lastUpdate": 1584636017962
}
9
plugins/parsers/json_v2/testdata/measurement_name_int/telegraf.conf
vendored
Normal file
@@ -0,0 +1,9 @@
# Example taken from: https://github.com/influxdata/feature-requests/issues/160

[[inputs.file]]
  files = ["./testdata/measurement_name_int/input.json"]
  data_format = "json_v2"
  [[inputs.file.json_v2]]
    measurement_name_path = "class_id"
    [[inputs.file.json_v2.field]]
      path = "label"
1
plugins/parsers/json_v2/testdata/mix_field_and_object/expected.out
vendored
Normal file
@@ -0,0 +1 @@
openweather,id=2.643743e+06,name=London coord_lat=51.5085,coord_lon=-0.1257,description="few clouds",main_temp=12.54,summary="Clouds",wind_speed=2.11 1628186541000000000
44
plugins/parsers/json_v2/testdata/mix_field_and_object/input.json
vendored
Normal file
@@ -0,0 +1,44 @@
{
    "coord": {
        "lon": -0.1257,
        "lat": 51.5085
    },
    "weather": [
        {
            "id": 801,
            "main": "Clouds",
            "description": "few clouds",
            "icon": "02n"
        }
    ],
    "base": "stations",
    "main": {
        "temp": 12.54,
        "feels_like": 11.86,
        "temp_min": 10.49,
        "temp_max": 14.27,
        "pressure": 1024,
        "humidity": 77
    },
    "visibility": 10000,
    "wind": {
        "speed": 2.11,
        "deg": 254,
        "gust": 4.63
    },
    "clouds": {
        "all": 21
    },
    "dt": 1633545358,
    "sys": {
        "type": 2,
        "id": 2019646,
        "country": "GB",
        "sunrise": 1633500560,
        "sunset": 1633541256
    },
    "timezone": 3600,
    "id": 2643743,
    "name": "London",
    "cod": 200
}
15
plugins/parsers/json_v2/testdata/mix_field_and_object/telegraf.conf
vendored
Normal file
@@ -0,0 +1,15 @@
[[inputs.file]]
  files = ["./testdata/mix_field_and_object/input.json"]
  data_format = "json_v2"
  [[inputs.file.json_v2]]
    measurement_name = "openweather"
    [[inputs.file.json_v2.field]]
      path = "weather.#.main"
      rename = "summary"
    [[inputs.file.json_v2.field]]
      path = "weather.#.description"
    [[inputs.file.json_v2.object]]
      path = "@this"
      included_keys = ["coord_lat", "coord_lon", "main_temp", "wind_speed"] # List of JSON keys (for a nested key, prepend the parent keys with underscores) that should be only included in result
      tags = ["id", "name"] # List of JSON keys (for a nested key, prepend the parent keys with underscores) to be a tag instead of a field
8
plugins/parsers/json_v2/testdata/multiple_arrays_in_object/expected.out
vendored
Normal file
@@ -0,0 +1,8 @@
file,title=The\ Lord\ Of\ The\ Rings author="Tolkien",chapters="A Long-expected Party",name="Bilbo",random=1,species="hobbit"
file,title=The\ Lord\ Of\ The\ Rings author="Tolkien",chapters="The Shadow of the Past",name="Bilbo",random=1,species="hobbit"
file,title=The\ Lord\ Of\ The\ Rings author="Tolkien",chapters="A Long-expected Party",name="Frodo",random=1,species="hobbit"
file,title=The\ Lord\ Of\ The\ Rings author="Tolkien",chapters="The Shadow of the Past",name="Frodo",random=1,species="hobbit"
file,title=The\ Lord\ Of\ The\ Rings author="Tolkien",chapters="A Long-expected Party",name="Bilbo",random=2,species="hobbit"
file,title=The\ Lord\ Of\ The\ Rings author="Tolkien",chapters="The Shadow of the Past",name="Bilbo",random=2,species="hobbit"
file,title=The\ Lord\ Of\ The\ Rings author="Tolkien",chapters="A Long-expected Party",name="Frodo",random=2,species="hobbit"
file,title=The\ Lord\ Of\ The\ Rings author="Tolkien",chapters="The Shadow of the Past",name="Frodo",random=2,species="hobbit"
24
plugins/parsers/json_v2/testdata/multiple_arrays_in_object/input.json
vendored
Normal file
@@ -0,0 +1,24 @@
{
    "book": {
        "title": "The Lord Of The Rings",
        "chapters": [
            "A Long-expected Party",
            "The Shadow of the Past"
        ],
        "author": "Tolkien",
        "characters": [
            {
                "name": "Bilbo",
                "species": "hobbit"
            },
            {
                "name": "Frodo",
                "species": "hobbit"
            }
        ],
        "random": [
            1,
            2
        ]
    }
}
11
plugins/parsers/json_v2/testdata/multiple_arrays_in_object/telegraf.conf
vendored
Normal file
@@ -0,0 +1,11 @@
# Example getting nested fields with duplicate names
# Example taken from: https://github.com/influxdata/telegraf/issues/1363

[[inputs.file]]
  files = ["./testdata/multiple_arrays_in_object/input.json"]
  data_format = "json_v2"
  [[inputs.file.json_v2]]
    [[inputs.file.json_v2.object]]
      path = "book"
      tags = ["title"]
      disable_prepend_keys = true
2
plugins/parsers/json_v2/testdata/multiple_json_input/expected.out
vendored
Normal file
@@ -0,0 +1,2 @@
file,from_station=COLM,to_station=ANTC,etd_estimate_direction=North minutes=2i
file,from_station=POWL,to_station=DALY,etd_estimate_direction=South minutes=6i
87
plugins/parsers/json_v2/testdata/multiple_json_input/input_1.json
vendored
Normal file
@@ -0,0 +1,87 @@
{
    "?xml": {
        "@version": "1.0",
        "@encoding": "utf-8"
    },
    "root": {
        "@id": "1",
        "uri": {
            "#cdata-section": "http://api.bart.gov/api/etd.aspx?cmd=etd&orig=COLM&dir=n&json=y"
        },
        "date": "07/02/2021",
        "time": "06:05:47 PM PDT",
        "station": [
            {
                "name": "Colma",
                "abbr": "COLM",
                "etd": [
                    {
                        "destination": "Antioch",
                        "abbreviation": "ANTC",
                        "limited": "0",
                        "estimate": [
                            {
                                "minutes": "2",
                                "platform": "2",
                                "direction": "North",
                                "length": "10",
                                "color": "YELLOW",
                                "hexcolor": "#ffff33",
                                "bikeflag": "1",
                                "delay": "0"
                            },
                            {
                                "minutes": "16",
                                "platform": "2",
                                "direction": "North",
                                "length": "10",
                                "color": "YELLOW",
                                "hexcolor": "#ffff33",
                                "bikeflag": "1",
                                "delay": "0"
                            },
                            {
                                "minutes": "31",
                                "platform": "2",
                                "direction": "North",
                                "length": "10",
                                "color": "YELLOW",
                                "hexcolor": "#ffff33",
                                "bikeflag": "1",
                                "delay": "0"
                            }
                        ]
                    },
                    {
                        "destination": "Richmond",
                        "abbreviation": "RICH",
                        "limited": "0",
                        "estimate": [
                            {
                                "minutes": "22",
                                "platform": "2",
                                "direction": "North",
                                "length": "10",
                                "color": "RED",
                                "hexcolor": "#ff0000",
                                "bikeflag": "1",
                                "delay": "0"
                            },
                            {
                                "minutes": "52",
                                "platform": "2",
                                "direction": "North",
                                "length": "10",
                                "color": "RED",
                                "hexcolor": "#ff0000",
                                "bikeflag": "1",
                                "delay": "0"
                            }
                        ]
                    }
                ]
            }
        ],
        "message": ""
    }
}
134
plugins/parsers/json_v2/testdata/multiple_json_input/input_2.json
vendored
Normal file
@@ -0,0 +1,134 @@
{
    "?xml": {
        "@version": "1.0",
        "@encoding": "utf-8"
    },
    "root": {
        "@id": "1",
        "uri": {
            "#cdata-section": "http://api.bart.gov/api/etd.aspx?cmd=etd&orig=POWL&dir=s&json=y"
        },
        "date": "07/02/2021",
        "time": "06:06:01 PM PDT",
        "station": [
            {
                "name": "Powell St.",
                "abbr": "POWL",
                "etd": [
                    {
                        "destination": "Daly City",
                        "abbreviation": "DALY",
                        "limited": "0",
                        "estimate": [
                            {
                                "minutes": "6",
                                "platform": "1",
                                "direction": "South",
                                "length": "10",
                                "color": "GREEN",
                                "hexcolor": "#339933",
                                "bikeflag": "1",
                                "delay": "0"
                            },
                            {
                                "minutes": "26",
                                "platform": "1",
                                "direction": "South",
                                "length": "9",
                                "color": "BLUE",
                                "hexcolor": "#0099cc",
                                "bikeflag": "1",
                                "delay": "0"
                            },
                            {
                                "minutes": "36",
                                "platform": "1",
                                "direction": "South",
                                "length": "10",
                                "color": "GREEN",
                                "hexcolor": "#339933",
                                "bikeflag": "1",
                                "delay": "0"
                            }
                        ]
                    },
                    {
                        "destination": "Millbrae",
                        "abbreviation": "MLBR",
                        "limited": "0",
                        "estimate": [
                            {
                                "minutes": "19",
                                "platform": "1",
                                "direction": "South",
                                "length": "10",
                                "color": "RED",
                                "hexcolor": "#ff0000",
                                "bikeflag": "1",
                                "delay": "0"
                            },
                            {
                                "minutes": "49",
                                "platform": "1",
                                "direction": "South",
                                "length": "10",
                                "color": "RED",
                                "hexcolor": "#ff0000",
                                "bikeflag": "1",
                                "delay": "0"
                            },
                            {
                                "minutes": "79",
                                "platform": "1",
                                "direction": "South",
                                "length": "10",
                                "color": "RED",
                                "hexcolor": "#ff0000",
                                "bikeflag": "1",
                                "delay": "0"
                            }
                        ]
                    },
                    {
                        "destination": "SF Airport",
                        "abbreviation": "SFIA",
                        "limited": "0",
                        "estimate": [
                            {
                                "minutes": "7",
                                "platform": "1",
                                "direction": "South",
                                "length": "10",
                                "color": "YELLOW",
                                "hexcolor": "#ffff33",
                                "bikeflag": "1",
                                "delay": "0"
                            },
                            {
                                "minutes": "37",
                                "platform": "1",
                                "direction": "South",
                                "length": "10",
                                "color": "YELLOW",
                                "hexcolor": "#ffff33",
                                "bikeflag": "1",
                                "delay": "0"
                            },
                            {
                                "minutes": "67",
                                "platform": "1",
                                "direction": "South",
                                "length": "10",
                                "color": "YELLOW",
                                "hexcolor": "#ffff33",
                                "bikeflag": "1",
                                "delay": "0"
                            }
                        ]
                    }
                ]
            }
        ],
        "message": ""
    }
}
18
plugins/parsers/json_v2/testdata/multiple_json_input/telegraf.conf
vendored
Normal file
@@ -0,0 +1,18 @@
[[inputs.file]]
  files = ["./testdata/multiple_json_input/input_1.json", "./testdata/multiple_json_input/input_2.json"]
  data_format = "json_v2"
  [[inputs.file.json_v2]]
    [[inputs.file.json_v2.object]]
      path = "root.station"
      [[inputs.file.json_v2.object.tag]]
        path = "#.abbr"
        rename = "from_station"
      [[inputs.file.json_v2.object.field]]
        path = "#.etd.0.estimate.0.minutes"
        rename = "minutes"
        type = "int"
      [[inputs.file.json_v2.object.tag]]
        path = "#.etd.0.abbreviation"
        rename = "to_station"
      [[inputs.file.json_v2.object.tag]]
        path = "#.etd.0.estimate.0.direction"
2
plugins/parsers/json_v2/testdata/multiple_timestamps/expected.out
vendored
Normal file
@@ -0,0 +1,2 @@
file name="fire" 1555745371450794118
file name="flood" 1555745371450794118
12
plugins/parsers/json_v2/testdata/multiple_timestamps/input.json
vendored
Normal file
@@ -0,0 +1,12 @@
{
    "events": [
        {
            "name": "fire",
            "time": "1555745371410"
        },
        {
            "name": "flood",
            "time": "1555745371410"
        }
    ]
}
10
plugins/parsers/json_v2/testdata/multiple_timestamps/telegraf.conf
vendored
Normal file
@@ -0,0 +1,10 @@
# Example taken from: https://github.com/influxdata/telegraf/issues/5940

[[inputs.file]]
  files = ["./testdata/multiple_timestamps/input.json"]
  data_format = "json_v2"
  [[inputs.file.json_v2]]
    [[inputs.file.json_v2.object]]
      path = "events"
      timestamp_key = "time"
      timestamp_format = "unix_ms"
12
plugins/parsers/json_v2/testdata/nested_and_nonnested_tags/expected.out
vendored
Normal file
@@ -0,0 +1,12 @@
file,hostname=testhost1,outputname=1A-CC01-PC01 systemVoltage=-54.1,systemCurrent=-3.8
file,hostname=testhost1,outputname=2A-CC01-KA01 systemVoltage=-54.1,systemCurrent=-3.8
file,hostname=testhost1,outputname=3A-CC01-CC02 systemVoltage=-54.1,systemCurrent=-3.8
file,hostname=testhost1,outputname=4A systemVoltage=-54.1,systemCurrent=-3.8
file,hostname=testhost1,outputname=5A systemVoltage=-54.1,systemCurrent=-3.8
file,hostname=testhost1,outputname=6A-CC01-88-INV01-A systemVoltage=-54.1,systemCurrent=-3.8
file,hostname=testhost2,outputname=1A systemVoltage=27.5,systemCurrent=9.5
file,hostname=testhost2,outputname=2A systemVoltage=27.5,systemCurrent=9.5
file,hostname=testhost2,outputname=3A systemVoltage=27.5,systemCurrent=9.5
file,hostname=testhost2,outputname=4A systemVoltage=27.5,systemCurrent=9.5
file,hostname=testhost2,outputname=5A systemVoltage=27.5,systemCurrent=9.5
file,hostname=testhost2,outputname=6A systemVoltage=27.5,systemCurrent=9.5
174
plugins/parsers/json_v2/testdata/nested_and_nonnested_tags/input.json
vendored
Normal file
@@ -0,0 +1,174 @@
[
    {
        "hostname": "testhost1",
        "systemVoltage": -54.1,
        "systemCurrent": -3.8,
        "tables": [
            {
                "outputnumber": 0.0,
                "outputname": "1A-CC01-PC01",
                "outputcurrent": -2.7,
                "outputfusestatus": 1.0,
                "outputenable": 1.0
            },
            {
                "outputnumber": 1.0,
                "outputname": "2A-CC01-KA01",
                "outputcurrent": 0.0,
                "outputfusestatus": 1.0,
                "outputenable": 1.0
            },
            {
                "outputnumber": 2.0,
                "outputname": "3A-CC01-CC02",
                "outputcurrent": 0.0,
                "outputfusestatus": 1.0,
                "outputenable": 1.0
            },
            {
                "outputnumber": 3.0,
                "outputname": "4A",
                "outputcurrent": 0.0,
                "outputfusestatus": 1.0,
                "outputenable": 1.0
            },
            {
                "outputnumber": 4.0,
                "outputname": "5A",
                "outputcurrent": 0.0,
                "outputfusestatus": 1.0,
                "outputenable": 1.0
            },
            {
                "outputnumber": 5.0,
                "outputname": "6A-CC01-88-INV01-A",
                "outputcurrent": -1.1,
                "outputfusestatus": 1.0,
                "outputenable": 1.0
            },
            {
                "busnumber": 0.0,
                "busname": "A--48A",
                "busvoltage": -54.1,
                "buscurrent": -3.8
            },
            {
                "busnumber": 1.0,
                "busname": "B--48B",
                "busvoltage": -53.9,
                "buscurrent": -4.2
            },
            {
                "alarmnumber": 0.0,
                "alarmname": "\u0000",
                "alarmstatus": 1.0
            },
            {
                "alarmnumber": 1.0,
                "alarmname": "\u0000",
                "alarmstatus": 1.0
            },
            {
                "alarmnumber": 2.0,
                "alarmname": "\u0000",
                "alarmstatus": 1.0
            },
            {
                "alarmnumber": 3.0,
                "alarmname": "\u0000",
                "alarmstatus": 1.0
            },
            {
                "alarmnumber": 4.0,
                "alarmname": "\u0000",
                "alarmstatus": 1.0
            }
        ]
    },
    {
        "hostname": "testhost2",
        "systemVoltage": 27.5,
        "systemCurrent": 9.5,
        "tables": [
            {
                "outputnumber": 0.0,
                "outputname": "1A",
                "outputcurrent": 0.0,
                "outputfusestatus": 1.0,
                "outputenable": 1.0
            },
            {
                "outputnumber": 1.0,
                "outputname": "2A",
                "outputcurrent": 0.0,
                "outputfusestatus": 1.0,
                "outputenable": 1.0
            },
            {
                "outputnumber": 2.0,
                "outputname": "3A",
                "outputcurrent": 0.0,
                "outputfusestatus": 1.0,
                "outputenable": 1.0
            },
            {
                "outputnumber": 3.0,
                "outputname": "4A",
                "outputcurrent": 0.6,
                "outputfusestatus": 1.0,
                "outputenable": 1.0
            },
            {
                "outputnumber": 4.0,
                "outputname": "5A",
                "outputcurrent": 6.5,
                "outputfusestatus": 1.0,
                "outputenable": 1.0
            },
            {
                "outputnumber": 5.0,
                "outputname": "6A",
                "outputcurrent": 0.0,
                "outputfusestatus": 2.0,
                "outputenable": 1.0
            },
            {
                "busnumber": 0.0,
                "busname": "A-24V",
                "busvoltage": 27.6,
                "buscurrent": 0.6
            },
            {
                "busnumber": 1.0,
                "busname": "B-12V",
                "busvoltage": 13.8,
                "buscurrent": 0.0
            },
            {
                "alarmnumber": 0.0,
                "alarmname": "\u0000",
                "alarmstatus": 1.0
            },
            {
                "alarmnumber": 1.0,
                "alarmname": "\u0000",
                "alarmstatus": 1.0
            },
            {
                "alarmnumber": 2.0,
                "alarmname": "\u0000",
                "alarmstatus": 1.0
            },
            {
                "alarmnumber": 3.0,
                "alarmname": "\u0000",
                "alarmstatus": 1.0
            },
            {
                "alarmnumber": 4.0,
                "alarmname": "\u0000",
                "alarmstatus": 1.0
            }
        ]
    }
]
16
plugins/parsers/json_v2/testdata/nested_and_nonnested_tags/telegraf.conf
vendored
Normal file
@@ -0,0 +1,16 @@
# Example taken from: https://github.com/influxdata/telegraf/issues/6437

# Parse String types from JSON
[[inputs.file]]
  files = ["./testdata/nested_and_nonnested_tags/input.json"]
  data_format = "json_v2"
  [[inputs.file.json_v2]]
    [[inputs.file.json_v2.object]]
      disable_prepend_keys = true
      path = "@this"
      included_keys = [
        "systemVoltage",
        "systemCurrent",
        "tables",
      ]
      tags = ["hostname", "tables_outputname"]
2
plugins/parsers/json_v2/testdata/nested_array_of_objects/expected.out
vendored
Normal file
@@ -0,0 +1,2 @@
new_metric,name=partition LogEndOffset=339238i,LogStartOffset=339238i,NumLogSegments=1i,Size=0i,UnderReplicatedPartitions=0i 1610056029037925000
new_metric,name=partition LogEndOffset=33914i,LogStartOffset=33238i,NumLogSegments=1i,Size=2i,UnderReplicatedPartitions=5i 1610056029037956000
36
plugins/parsers/json_v2/testdata/nested_array_of_objects/input.json
vendored
Normal file
@@ -0,0 +1,36 @@
[
    {
        "data": {
            "LogEndOffset": 339238,
            "LogStartOffset": 339238,
            "NumLogSegments": 1,
            "Size": 0,
            "UnderReplicatedPartitions": 0
        },
        "name": "partition",
        "tags": {
            "host": "CUD1-001559",
            "jolokia_agent_url": "http://localhost:7777/jolokia",
            "partition": "1",
            "topic": "qa-kafka-connect-logs"
        },
        "timestamp": 1591124461
    },
    {
        "data": {
            "LogEndOffset": 33914,
            "LogStartOffset": 33238,
            "NumLogSegments": 1,
            "Size": 2,
            "UnderReplicatedPartitions": 5
        },
        "name": "partition",
        "tags": {
            "host": "CUD1-001559",
            "jolokia_agent_url": "http://localhost:7777/jolokia",
            "partition": "1",
            "topic": "qa-kafka-connect-logs"
        },
        "timestamp": 1591124461
    }
]
15
plugins/parsers/json_v2/testdata/nested_array_of_objects/telegraf.conf
vendored
Normal file
@@ -0,0 +1,15 @@
# Example taken from: https://github.com/influxdata/feature-requests/issues/160

[[inputs.file]]
  files = ["./testdata/nested_array_of_objects/input.json"]
  data_format = "json_v2"
  [[inputs.file.json_v2]]
    measurement_name = "new_metric"
    [[inputs.file.json_v2.object]]
      path = "@this"
      disable_prepend_keys = true
      excluded_keys = ["tags", "timestamp"]
      tags = ["name"]
      [inputs.file.json_v2.object.fields]
        data = "int"
5
plugins/parsers/json_v2/testdata/nested_objects_optional/expected.out
vendored
Normal file
@@ -0,0 +1,5 @@
file,datatype=float,name=inlet-1/temperature value_f=0.20791169081775931 1695679023000000000
file,datatype=float value_f=-0.3090169943749477,name="inlet-1/temperature" 1695679077000000000
file,datatype=string value_s="-0.3090169943749477",name="inlet-3/flow" 1695679077000000000
file,datatype=int value_i=95i,name="inlet-2/temperature" 1695679077000000000
file,datatype=bool value_b=true,name="inlet-2/flow" 1695679077000000000
25
plugins/parsers/json_v2/testdata/nested_objects_optional/nested_objects_nest.json
vendored
Normal file
@@ -0,0 +1,25 @@
{
    "timestamp": 1695679077118.0,
    "metrics": [
        {
            "value": -0.3090169943749477,
            "name": "inlet-1/temperature",
            "datatype": "float"
        },
        {
            "value": 95,
            "name": "inlet-2/temperature",
            "datatype": "int"
        },
        {
            "value": true,
            "name": "inlet-2/flow",
            "datatype": "bool"
        },
        {
            "value": "-0.3090169943749477",
            "name": "inlet-3/flow",
            "datatype": "string"
        }
    ]
}
6
plugins/parsers/json_v2/testdata/nested_objects_optional/nested_objects_single.json
vendored
Normal file
@@ -0,0 +1,6 @@
{
    "timestamp": 1695679022882.0,
    "value": 0.20791169081775931,
    "name": "inlet-1/temperature",
    "datatype": "float"
}
58
plugins/parsers/json_v2/testdata/nested_objects_optional/telegraf.conf
vendored
Normal file
@@ -0,0 +1,58 @@
# Example taken from: https://github.com/influxdata/telegraf/issues/13990

# Parse String types from JSON
[[inputs.file]]

  files = ["./testdata/nested_objects_optional/nested_objects_single.json", "./testdata/nested_objects_optional/nested_objects_nest.json"]
  data_format = "json_v2"

  [[inputs.file.json_v2]]
    timestamp_path = 'timestamp'
    timestamp_format = 'unix_ms'

    [[inputs.file.json_v2.tag]]
      path = 'name'
      optional = true
    [[inputs.file.json_v2.tag]]
      path = 'datatype'
      optional = true
    [[inputs.file.json_v2.field]]
      path = 'value'
      rename = 'value_f'
      optional = true

    [[inputs.file.json_v2.object]]
      path = 'metrics.#(datatype=="float")#'
      optional = true
      tags = ['datatype']
      [inputs.file.json_v2.object.renames]
        value = 'value_f'
      [inputs.file.json_v2.object.fields]
        value = 'float'

    [[inputs.file.json_v2.object]]
      path = 'metrics.#(datatype=="string")#'
      optional = true
      tags = ['datatype']
      [inputs.file.json_v2.object.renames]
        value = 'value_s'
      [inputs.file.json_v2.object.fields]
        value = 'string'

    [[inputs.file.json_v2.object]]
      path = 'metrics.#(datatype=="int")#'
      optional = true
      tags = ['datatype']
      [inputs.file.json_v2.object.renames]
        value = 'value_i'
      [inputs.file.json_v2.object.fields]
        value = 'int'

    [[inputs.file.json_v2.object]]
      path = 'metrics.#(datatype=="bool")#'
      optional = true
      tags = ['datatype']
      [inputs.file.json_v2.object.renames]
        value = 'value_b'
      [inputs.file.json_v2.object.fields]
        value = 'bool'
2
plugins/parsers/json_v2/testdata/nested_tags/expected.out
vendored
Normal file
@@ -0,0 +1,2 @@
file,Firmware=LDGSW07G,Model=WDC\ WUH721414ALE604,Serial=9JHNGTUT Count=0,Errors=0
file,Firmware=LDGSW07G,Model=WDC\ WUH721414ALE604,Serial=9JHLPW9T Errors=0,Count=0
16
plugins/parsers/json_v2/testdata/nested_tags/input.json
vendored
Normal file
@@ -0,0 +1,16 @@
{
    "device0": {
        "Count": 0,
        "Errors": 0,
        "Serial": "9JHNGTUT",
        "Model": "WDC WUH721414ALE604",
        "Firmware": "LDGSW07G"
    },
    "device1": {
        "Count": 0,
        "Errors": 0,
        "Serial": "9JHLPW9T",
        "Model": "WDC WUH721414ALE604",
        "Firmware": "LDGSW07G"
    }
}
12
plugins/parsers/json_v2/testdata/nested_tags/telegraf.conf
vendored
Normal file
@@ -0,0 +1,12 @@
# Example taken from: https://github.com/influxdata/telegraf/issues/6853

[[inputs.file]]
  files = ["./testdata/nested_tags/input.json"]
  data_format = "json_v2"
  [[inputs.file.json_v2]]
    [[inputs.file.json_v2.object]]
      path = "device0"
      tags = ["Firmware", "Model", "Serial"]
    [[inputs.file.json_v2.object]]
      path = "device1"
      tags = ["Firmware", "Model", "Serial"]
3
plugins/parsers/json_v2/testdata/nested_tags_complex/expected.out
vendored
Normal file
@@ -0,0 +1,3 @@
file,Firmware=LDGSW07G,Model=WDC\ WUH721414ALE604,Serial=9JHNGTUT Count=0,Errors=0
file,Firmware=LDGSW07G,Model=WDC\ WUH721414ALE604,Serial=9JHNGHJBT Errors=0,Count=2
file,Firmware=LDGSW07G,Model=WDC\ WUH721414ALE604,Serial=9JHLPW9T Errors=0,Count=0
35
plugins/parsers/json_v2/testdata/nested_tags_complex/input.json
vendored
Normal file
@@ -0,0 +1,35 @@
{
    "Group A": [
        {
            "Sub-group 1": [
                {
                    "Count": 0,
                    "Errors": 0,
                    "Serial": "9JHNGTUT",
                    "Model": "WDC WUH721414ALE604",
                    "Firmware": "LDGSW07G"
                },
                {
                    "Count": 2,
                    "Errors": 0,
                    "Serial": "9JHNGHJBT",
                    "Model": "WDC WUH721414ALE604",
                    "Firmware": "LDGSW07G"
                }
            ]
        }
    ],
    "Group B": [
        {
            "Sub-group 1": [
                {
                    "Count": 0,
                    "Errors": 0,
                    "Serial": "9JHLPW9T",
                    "Model": "WDC WUH721414ALE604",
                    "Firmware": "LDGSW07G"
                }
            ]
        }
    ]
}
14
plugins/parsers/json_v2/testdata/nested_tags_complex/telegraf.conf
vendored
Normal file
@@ -0,0 +1,14 @@
# Example taken from: https://github.com/influxdata/telegraf/issues/6853

[[inputs.file]]
  files = ["./testdata/nested_tags_complex/input.json"]
  data_format = "json_v2"
  [[inputs.file.json_v2]]
    [[inputs.file.json_v2.object]]
      path = "Group A"
      disable_prepend_keys = true
      tags = ["Sub-group_1_Firmware", "Sub-group_1_Model", "Sub-group_1_Serial"]
    [[inputs.file.json_v2.object]]
      path = "Group B"
      disable_prepend_keys = true
      tags = ["Sub-group_1_Firmware", "Sub-group_1_Model", "Sub-group_1_Serial"]
1
plugins/parsers/json_v2/testdata/null/expected.out
vendored
Normal file
@@ -0,0 +1 @@
file,id=ak0217l8ue0x,type=Feature detail="https://earthquake.usgs.gov/earthquakes/feed/v1.0/detail/ak0217l8ue0x.geojson",mag=1.5,place="63 km N of Petersville, Alaska",status="automatic",time=1623708726566,updated=1623709998223,url="https://earthquake.usgs.gov/earthquakes/eventpage/ak0217l8ue0x"
40
plugins/parsers/json_v2/testdata/null/input.json
vendored
Normal file
@@ -0,0 +1,40 @@
{
    "type": "FeatureCollection",
    "metadata": {
        "generated": 1623710450000,
        "url": "https://earthquake.usgs.gov/earthquakes/feed/v1.0/summary/all_hour.geojson",
        "title": "USGS All Earthquakes, Past Hour",
        "status": 200,
        "api": "1.10.3",
        "count": 10
    },
    "features": [
        {
            "type": "Feature",
            "properties": {
                "mag": 1.5,
                "place": "63 km N of Petersville, Alaska",
                "time": 1623708726566,
                "updated": 1623709998223,
                "tz": null,
                "url": "https://earthquake.usgs.gov/earthquakes/eventpage/ak0217l8ue0x",
                "detail": "https://earthquake.usgs.gov/earthquakes/feed/v1.0/detail/ak0217l8ue0x.geojson",
                "felt": null,
                "cdi": null,
                "mmi": null,
                "alert": null,
                "status": "automatic"
            },
            "id": "ak0217l8ue0x"
        }
    ],
    "bbox": [
        -157.5749,
        32.9001667,
        0.25,
        -115.6211667,
        66.331,
        132.5
    ]
}
8
plugins/parsers/json_v2/testdata/null/telegraf.conf
vendored
Normal file
@@ -0,0 +1,8 @@
[[inputs.file]]
  files = ["./testdata/null/input.json"]
  data_format = "json_v2"
  [[inputs.file.json_v2]]
    [[inputs.file.json_v2.object]]
      path = "features"
      tags = ["type", "id"]
      disable_prepend_keys = true
5
plugins/parsers/json_v2/testdata/object/expected.out
vendored
Normal file
@@ -0,0 +1,5 @@
bart_json_v2,destination=Antioch,name=Colma minutes=13i
bart_json_v2,destination=Antioch,name=Colma minutes=43i
bart_json_v2,destination=Millbrae,name=Colma minutes=19i
bart_json_v2,destination=Millbrae,name=Colma minutes=49i
bart_json_v2,destination=Millbrae,name=Colma minutes=79i
87
plugins/parsers/json_v2/testdata/object/input.json
vendored
Normal file
@@ -0,0 +1,87 @@
{
    "?xml": {
        "@version": "1.0",
        "@encoding": "utf-8"
    },
    "root": {
        "@id": "1",
        "uri": {
            "#cdata-section": "http://api.bart.gov/api/etd.aspx?cmd=etd&orig=COLM&json=y"
        },
        "date": "06/03/2021",
        "time": "12:54:31 PM PDT",
        "station": [
            {
                "name": "Colma",
                "abbr": "COLM",
                "etd": [
                    {
                        "destination": "Antioch",
                        "abbreviation": "ANTC",
                        "limited": "0",
                        "estimate": [
                            {
                                "minutes": "13",
                                "platform": "2",
                                "direction": "North",
                                "length": "10",
                                "color": "YELLOW",
                                "hexcolor": "#ffff33",
                                "bikeflag": "1",
                                "delay": "0"
                            },
                            {
                                "minutes": "43",
                                "platform": "2",
                                "direction": "North",
                                "length": "10",
                                "color": "YELLOW",
                                "hexcolor": "#ffff33",
                                "bikeflag": "1",
                                "delay": "0"
                            }
                        ]
                    },
                    {
                        "destination": "Millbrae",
                        "abbreviation": "MLBR",
                        "limited": "0",
                        "estimate": [
                            {
                                "minutes": "19",
                                "platform": "1",
                                "direction": "South",
                                "length": "10",
                                "color": "RED",
                                "hexcolor": "#ff0000",
                                "bikeflag": "1",
                                "delay": "0"
                            },
                            {
                                "minutes": "49",
                                "platform": "1",
                                "direction": "South",
                                "length": "10",
                                "color": "RED",
                                "hexcolor": "#ff0000",
                                "bikeflag": "1",
                                "delay": "0"
                            },
                            {
                                "minutes": "79",
                                "platform": "1",
                                "direction": "South",
                                "length": "10",
                                "color": "RED",
                                "hexcolor": "#ff0000",
                                "bikeflag": "1",
                                "delay": "0"
                            }
                        ]
                    }
                ]
            }
        ],
        "message": ""
    }
}
12
plugins/parsers/json_v2/testdata/object/telegraf.conf
vendored
Normal file
@@ -0,0 +1,12 @@
[[inputs.file]]
  files = ["./testdata/object/input.json"]
  data_format = "json_v2"
  [[inputs.file.json_v2]]
    measurement_name = "bart_json_v2"
    [[inputs.file.json_v2.object]]
      path = "root.station"
      disable_prepend_keys = true
      included_keys = ["etd_estimate_minutes"]
      tags = ["name", "etd_destination"]
      [inputs.file.json_v2.object.fields]
        etd_estimate_minutes = "int"
5
plugins/parsers/json_v2/testdata/object_multiple/expected.out
vendored
Normal file
@@ -0,0 +1,5 @@
api_http_status_codes 200=11586,201=16,202=14,204=8,404=43,500=0,503=0 1741532840501119000
api_requests requests=11668 1741532840501225000
api_responses responses=11667 1741532840501263000
api_av sessions_added=0,sessions_removed=0 1741544068286646000
21
plugins/parsers/json_v2/testdata/object_multiple/input.json
vendored
Normal file
@@ -0,0 +1,21 @@
{
    "api": {
        "http_status_codes": {
            "200": 11586,
            "201": 16,
            "202": 14,
            "204": 8,
            "404": 43,
            "500": 0,
            "503": 0
        },
        "requests": 11668,
        "responses": 11667,
        "av": {
            "sessions": {
                "added": 0,
                "removed": 0
            }
        }
    }
}
23
plugins/parsers/json_v2/testdata/object_multiple/telegraf.conf
vendored
Normal file
@@ -0,0 +1,23 @@
[[inputs.file]]
  files = ["./testdata/object_multiple/input.json"]
  data_format = "json_v2"

  [[inputs.file.json_v2]]
    measurement_name = "api_http_status_codes"
    [[inputs.file.json_v2.object]]
      path = "api.http_status_codes"

  [[inputs.file.json_v2]]
    measurement_name = "api_requests"
    [[inputs.file.json_v2.field]]
      path = "api.requests"

  [[inputs.file.json_v2]]
    measurement_name = "api_responses"
    [[inputs.file.json_v2.field]]
      path = "api.responses"

  [[inputs.file.json_v2]]
    measurement_name = "api_av"
    [[inputs.file.json_v2.object]]
      path = "api.av"
3
plugins/parsers/json_v2/testdata/object_timestamp/expected.out
vendored
Normal file
@@ -0,0 +1,3 @@
metric,id=100 value=100.123 1609459199000000000
metric,id=101 value=200.001 1276605000000000000
metric,id=102 value=999.999 946684800000000000
19
plugins/parsers/json_v2/testdata/object_timestamp/input.json
vendored
Normal file
@@ -0,0 +1,19 @@
{
    "events": [
        {
            "id": 100,
            "value": 100.123,
            "time": "2020-12-31T23:59:59Z"
        },
        {
            "id": 101,
            "value": 200.001,
            "time": "2010-06-15T12:30:00Z"
        },
        {
            "id": 102,
            "value": 999.999,
            "time": "2000-01-01T00:00:00Z"
        }
    ]
}
12
plugins/parsers/json_v2/testdata/object_timestamp/telegraf.conf
vendored
Normal file
@@ -0,0 +1,12 @@
[[inputs.file]]
  files = ["./testdata/object_timestamp/input.json"]
  data_format = "json_v2"

  [[inputs.file.json_v2]]
    measurement_name = "metric"

    [[inputs.file.json_v2.object]]
      path = "events"
      tags = ["id"]
      timestamp_key = "time"
      timestamp_format = "2006-01-02T15:04:05Z"
0
plugins/parsers/json_v2/testdata/optional/expected.out
vendored
Normal file
3
plugins/parsers/json_v2/testdata/optional/input.json
vendored
Normal file
@@ -0,0 +1,3 @@
{
    "test": "test"
}
15
plugins/parsers/json_v2/testdata/optional/telegraf.conf
vendored
Normal file
@@ -0,0 +1,15 @@
# Example taken from: https://github.com/influxdata/telegraf/issues/7097

# Parse String types from JSON
[[inputs.file]]
  files = ["./testdata/optional/input.json"]
  data_format = "json_v2"
  [[inputs.file.json_v2]]
    [[inputs.file.json_v2.field]]
      path = "wrong"
      optional = true
    [[inputs.file.json_v2.object]]
      path = "wrong"
      optional = true
3
plugins/parsers/json_v2/testdata/optional_objects/expected.out
vendored
Normal file
@@ -0,0 +1,3 @@
file,Meter_number=12344 Power_curr=168,Total_in=2769.492 1646326975000000000
file Humidity=63.3,Pressure=1020.6,Temperature=22.5 1646335555000000000
file Illuminance=2.022 1646335555000000000
1
plugins/parsers/json_v2/testdata/optional_objects/input_1.json
vendored
Normal file
@@ -0,0 +1 @@
{"Time":"2022-03-03T17:02:55","MT681":{"Power_curr":168,"Total_in":2769.492,"Meter_number":"12344"}}
1
plugins/parsers/json_v2/testdata/optional_objects/input_2.json
vendored
Normal file
@@ -0,0 +1 @@
{"Time":"2022-03-03T19:25:55","BME280":{"Temperature":22.5,"Humidity":63.3,"DewPoint":15.1,"Pressure":1020.6,"SeaPressure":1024.1},"TSL2561":{"Illuminance":2.022,"IR":16,"Broadband":78},"PressureUnit":"hPa","TempUnit":"C"}
21
plugins/parsers/json_v2/testdata/optional_objects/telegraf.conf
vendored
Normal file
@@ -0,0 +1,21 @@
# Parse different objects
[[inputs.file]]
  files = ["./testdata/optional_objects/input_1.json", "./testdata/optional_objects/input_2.json"]
  data_format = "json_v2"
  [[inputs.file.json_v2]]
    timestamp_path = "Time"
    timestamp_format = "2006-01-02T15:04:05"
    [[inputs.file.json_v2.object]]
      path = "BME280"
      included_keys = ["Humidity", "Pressure", "Temperature"]
      optional = true
    [[inputs.file.json_v2.object]]
      path = "TSL2561"
      included_keys = ["Illuminance"]
      optional = true
    [[inputs.file.json_v2.object]]
      path = "MT681"
      optional = true
      tags = ["Meter_number"]
1
plugins/parsers/json_v2/testdata/subfieldtag_in_object/expected.out
vendored
Normal file
@@ -0,0 +1 @@
file,from_station=COLM,to_station=ANTC,etd_estimate_direction=North etd_estimate_minutes=6i
97
plugins/parsers/json_v2/testdata/subfieldtag_in_object/input.json
vendored
Normal file
@@ -0,0 +1,97 @@
{
    "?xml": {
        "@version": "1.0",
        "@encoding": "utf-8"
    },
    "root": {
        "@id": "1",
        "uri": {
            "#cdata-section": "http://api.bart.gov/api/etd.aspx?cmd=etd&orig=COLM&dir=n&json=y"
        },
        "date": "06/25/2021",
        "time": "05:01:31 PM PDT",
        "station": [
            {
                "name": "Colma",
                "abbr": "COLM",
                "etd": [
                    {
                        "destination": "Antioch",
                        "abbreviation": "ANTC",
                        "limited": "0",
                        "estimate": [
                            {
                                "minutes": "6",
                                "platform": "2",
                                "direction": "North",
                                "length": "10",
                                "color": "YELLOW",
                                "hexcolor": "#ffff33",
                                "bikeflag": "1",
                                "delay": "0"
                            },
                            {
                                "minutes": "36",
                                "platform": "2",
                                "direction": "North",
                                "length": "10",
                                "color": "YELLOW",
                                "hexcolor": "#ffff33",
                                "bikeflag": "1",
                                "delay": "0"
                            },
                            {
                                "minutes": "51",
                                "platform": "2",
                                "direction": "North",
                                "length": "10",
                                "color": "YELLOW",
                                "hexcolor": "#ffff33",
                                "bikeflag": "1",
                                "delay": "0"
                            }
                        ]
                    },
                    {
                        "destination": "Richmond",
                        "abbreviation": "RICH",
                        "limited": "0",
                        "estimate": [
                            {
                                "minutes": "12",
                                "platform": "2",
                                "direction": "North",
                                "length": "10",
                                "color": "RED",
                                "hexcolor": "#ff0000",
                                "bikeflag": "1",
                                "delay": "0"
                            },
                            {
                                "minutes": "26",
                                "platform": "2",
                                "direction": "North",
                                "length": "10",
                                "color": "RED",
                                "hexcolor": "#ff0000",
                                "bikeflag": "1",
                                "delay": "0"
                            },
                            {
                                "minutes": "41",
                                "platform": "2",
                                "direction": "North",
                                "length": "10",
                                "color": "RED",
                                "hexcolor": "#ff0000",
                                "bikeflag": "1",
                                "delay": "0"
                            }
                        ]
                    }
                ]
            }
        ],
        "message": ""
    }
}
17
plugins/parsers/json_v2/testdata/subfieldtag_in_object/telegraf.conf
vendored
Normal file
@@ -0,0 +1,17 @@
[[inputs.file]]
  files = ["./testdata/subfieldtag_in_object/input.json"]
  data_format = "json_v2"
  [[inputs.file.json_v2]]
    [[inputs.file.json_v2.object]]
      path = "root.station"
      [[inputs.file.json_v2.object.field]]
        path = "#.etd.0.estimate.0.minutes"
        type = "int"
      [[inputs.file.json_v2.object.tag]]
        path = "#.abbr"
        rename = "from_station"
      [[inputs.file.json_v2.object.tag]]
        path = "#.etd.0.abbreviation"
        rename = "to_station"
      [[inputs.file.json_v2.object.tag]]
        path = "#.etd.0.estimate.0.direction"
4
plugins/parsers/json_v2/testdata/subfieldtag_in_object_2/expected.out
vendored
Normal file
@@ -0,0 +1,4 @@
file,data=3 cnt=23i,format=0i
file,data=7 cnt=23i,format=0i
file,data=10 cnt=23i,format=0i
file,data=23 cnt=23i,format=0i
10
plugins/parsers/json_v2/testdata/subfieldtag_in_object_2/input.json
vendored
Normal file
@@ -0,0 +1,10 @@
{
    "cnt": 23,
    "data": [
        3,
        7,
        10,
        23
    ],
    "format": 0
}
16
plugins/parsers/json_v2/testdata/subfieldtag_in_object_2/telegraf.conf
vendored
Normal file
@@ -0,0 +1,16 @@
# Example taken from: https://github.com/influxdata/telegraf/issues/5940

[[inputs.file]]
  files = ["./testdata/subfieldtag_in_object_2/input.json"]
  data_format = "json_v2"
  [[inputs.file.json_v2]]
    [[inputs.file.json_v2.object]]
      path = "@this"
      [[inputs.file.json_v2.object.tag]]
        path = "data"
      [[inputs.file.json_v2.object.field]]
        path = "cnt"
        type = "int"
      [[inputs.file.json_v2.object.field]]
        path = "format"
        type = "int"
4
plugins/parsers/json_v2/testdata/timestamp/expected.out
vendored
Normal file
@@ -0,0 +1,4 @@
file,name=temperature,units=℃ value=23.4 1555745371450794118
file,name=moisture,units=% value=5 1555745371450794118
file,name=light,units=lux value=10118 1555745371450794118
file,name=fertility,units=us/cm value=0 1555745371450794118
25
plugins/parsers/json_v2/testdata/timestamp/input.json
vendored
Normal file
@@ -0,0 +1,25 @@
{
    "time": 1555745371410,
    "measurements": [
        {
            "name": "temperature",
            "value": 23.4,
            "units": "℃"
        },
        {
            "name": "moisture",
            "value": 5,
            "units": "%"
        },
        {
            "name": "light",
            "value": 10118,
            "units": "lux"
        },
        {
            "name": "fertility",
            "value": 0,
            "units": "us/cm"
        }
    ]
}
11
plugins/parsers/json_v2/testdata/timestamp/telegraf.conf
vendored
Normal file
@@ -0,0 +1,11 @@
# Example taken from: https://github.com/influxdata/telegraf/issues/5940

[[inputs.file]]
  files = ["./testdata/timestamp/input.json"]
  data_format = "json_v2"
  [[inputs.file.json_v2]]
    timestamp_path = "time"
    timestamp_format = "unix_ms"
    [[inputs.file.json_v2.object]]
      path = "measurements"
      tags = ["name", "units"]
2
plugins/parsers/json_v2/testdata/timestamp_ns/expected.out
vendored
Normal file
@@ -0,0 +1,2 @@
test value=0 1631202459121654321
test value=1 1631202459121654321
7
plugins/parsers/json_v2/testdata/timestamp_ns/input.json
vendored
Normal file
@@ -0,0 +1,7 @@
{
    "test": [
        { "value": 0 },
        { "value": 1 }
    ],
    "timestamp": 1631202459121654321
}
11
plugins/parsers/json_v2/testdata/timestamp_ns/telegraf.conf
vendored
Normal file
@@ -0,0 +1,11 @@
# Example taken from: https://github.com/influxdata/telegraf/issues/5940

[[inputs.file]]
  files = ["./testdata/timestamp_ns/input.json"]
  data_format = "json_v2"
  [[inputs.file.json_v2]]
    measurement_name = "test"
    timestamp_path = "timestamp"
    timestamp_format = "unix_ns"
    [[inputs.file.json_v2.object]]
      path = "test"
1
plugins/parsers/json_v2/testdata/timestamp_rfc3339/expected.out
vendored
Normal file
@@ -0,0 +1 @@
file f="value" 1644434944000000000
4
plugins/parsers/json_v2/testdata/timestamp_rfc3339/input.json
vendored
Normal file
@@ -0,0 +1,4 @@
{
    "when": "2022-02-09T19:29:04Z",
    "f": "value"
}
8
plugins/parsers/json_v2/testdata/timestamp_rfc3339/telegraf.conf
vendored
Normal file
@@ -0,0 +1,8 @@
[[inputs.file]]
  files = ["./testdata/timestamp_rfc3339/input.json"]
  data_format = "json_v2"
  [[inputs.file.json_v2]]
    timestamp_path = "when"
    timestamp_format = "rfc3339"
    [[inputs.file.json_v2.field]]
      path = "f"
4
plugins/parsers/json_v2/testdata/types/expected.out
vendored
Normal file
4
plugins/parsers/json_v2/testdata/types/expected.out
vendored
Normal file
|
@ -0,0 +1,4 @@
|
|||
file explicitstringtypeName="Bilbo",defaultstringtypeName="Baggins",convertbooltostringName="true",convertinttostringName="1",convertfloattostringName="1.1"
|
||||
file defaultinttypeName=2,convertfloatointName=3i,convertstringtointName=4i,convertbooltointName=0i,explicitinttypeName=1i,uinttype=1u
|
||||
file convertstringtofloatName=4.1,explicitfloattypeName=1.1,defaultfloattypeName=2.1,convertintotfloatName=3
|
||||
file explicitbooltypeName=true,defaultbooltypeName=false,convertinttoboolName=true,convertstringtoboolName=false,convertintstringtoboolTrueName=true,convertintstringtoboolFalseName=false
|
22
plugins/parsers/json_v2/testdata/types/input.json
vendored
Normal file
22
plugins/parsers/json_v2/testdata/types/input.json
vendored
Normal file
|
@ -0,0 +1,22 @@
|
|||
{
|
||||
"explicitstringtype": "Bilbo",
|
||||
"defaultstringtype": "Baggins",
|
||||
"convertbooltostring": true,
|
||||
"convertinttostring": 1,
|
||||
"convertfloattostring": 1.1,
|
||||
"explicitinttype": 1,
|
||||
"defaultinttype": 2,
|
||||
"convertfloatoint": 3.1,
|
||||
"convertstringtoint": "4",
|
||||
"convertbooltoint": false,
|
||||
"explicitfloattype": 1.1,
|
||||
"defaultfloattype": 2.1,
|
||||
"convertintotfloat": 3,
|
||||
"convertstringtofloat": "4.1",
|
||||
"explicitbooltype": true,
|
||||
"defaultbooltype": false,
|
||||
"convertinttobool": 1,
|
||||
"convertstringtobool": "false",
|
||||
"convertintstringtoboolTrue": "1",
|
||||
"convertintstringtoboolFalse": "0"
|
||||
}
|
105
plugins/parsers/json_v2/testdata/types/telegraf.conf
vendored
Normal file
@@ -0,0 +1,105 @@
# Example taken from: https://github.com/influxdata/telegraf/issues/7097

# Parse String types from JSON
[[inputs.file]]
    files = ["./testdata/types/input.json"]
    data_format = "json_v2"
    [[inputs.file.json_v2]]
        [[inputs.file.json_v2.field]]
            rename = "explicitstringtypeName"
            path = "explicitstringtype"
            type = "string"
        [[inputs.file.json_v2.field]]
            rename = "defaultstringtypeName"
            path = "defaultstringtype"
        [[inputs.file.json_v2.field]]
            rename = "convertbooltostringName"
            path = "convertbooltostring"
            type = "string"
        [[inputs.file.json_v2.field]]
            rename = "convertinttostringName"
            path = "convertinttostring"
            type = "string"
        [[inputs.file.json_v2.field]]
            rename = "convertfloattostringName"
            path = "convertfloattostring"
            type = "string"

# Parse int types from JSON
[[inputs.file]]
    files = ["./testdata/types/input.json"]
    data_format = "json_v2"
    [[inputs.file.json_v2]]
        [[inputs.file.json_v2.field]]
            rename = "explicitinttypeName"
            path = "explicitinttype"
            type = "int"
        [[inputs.file.json_v2.field]]
            rename = "uinttype"
            path = "explicitinttype"
            type = "uint"
        [[inputs.file.json_v2.field]]
            rename = "defaultinttypeName"
            path = "defaultinttype"
        [[inputs.file.json_v2.field]]
            rename = "convertfloatointName"
            path = "convertfloatoint"
            type = "int"
        [[inputs.file.json_v2.field]]
            rename = "convertstringtointName"
            path = "convertstringtoint"
            type = "int"
        [[inputs.file.json_v2.field]]
            rename = "convertbooltointName"
            path = "convertbooltoint"
            type = "int"

# Parse float types from JSON
[[inputs.file]]
    files = ["./testdata/types/input.json"]
    data_format = "json_v2"
    [[inputs.file.json_v2]]
        [[inputs.file.json_v2.field]]
            rename = "explicitfloattypeName"
            path = "explicitfloattype"
            type = "float"
        [[inputs.file.json_v2.field]]
            rename = "defaultfloattypeName"
            path = "defaultfloattype"
        [[inputs.file.json_v2.field]]
            rename = "convertintotfloatName"
            path = "convertintotfloat"
            type = "float"
        [[inputs.file.json_v2.field]]
            rename = "convertstringtofloatName"
            path = "convertstringtofloat"
            type = "float"

# Parse bool types from JSON
[[inputs.file]]
    files = ["./testdata/types/input.json"]
    data_format = "json_v2"
    [[inputs.file.json_v2]]
        [[inputs.file.json_v2.field]]
            rename = "explicitbooltypeName"
            path = "explicitbooltype"
            type = "bool"
        [[inputs.file.json_v2.field]]
            rename = "defaultbooltypeName"
            path = "defaultbooltype"
        [[inputs.file.json_v2.field]]
            rename = "convertinttoboolName"
            path = "convertinttobool"
            type = "bool"
        [[inputs.file.json_v2.field]]
            rename = "convertstringtoboolName"
            path = "convertstringtobool"
            type = "bool"
        [[inputs.file.json_v2.field]]
            rename = "convertintstringtoboolTrueName"
            path = "convertintstringtoboolTrue"
            type = "bool"
        [[inputs.file.json_v2.field]]
            rename = "convertintstringtoboolFalseName"
            path = "convertintstringtoboolFalse"
            type = "bool"
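Taken together, expected.out pins down the conversion rules exercised here: float-to-int truncates toward zero (3.1 becomes 3i), bool-to-int maps false to 0i, and the numeric strings "1" and "0" convert to true and false. A minimal Go sketch of those observed rules — `toBool` is a hypothetical helper for illustration, not the parser's implementation:

```go
package main

import (
	"fmt"
	"strconv"
)

// toBool mirrors the bool conversions exercised above: JSON booleans pass
// through, numbers are true when non-zero, and strings are tried first as
// numbers ("1"/"0") and then as "true"/"false".
func toBool(v interface{}) (bool, error) {
	switch t := v.(type) {
	case bool:
		return t, nil
	case float64: // encoding/json decodes JSON numbers to float64
		return t != 0, nil
	case string:
		if n, err := strconv.ParseFloat(t, 64); err == nil {
			return n != 0, nil
		}
		return strconv.ParseBool(t)
	}
	return false, fmt.Errorf("cannot convert %T to bool", v)
}

func main() {
	for _, v := range []interface{}{1.0, "false", "1", "0"} {
		b, err := toBool(v)
		fmt.Printf("%v -> %v (err: %v)\n", v, b, err)
	}

	f := 3.1
	fmt.Println(int64(f)) // prints 3: float-to-int truncates toward zero
}
```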
6
plugins/parsers/json_v2/testdata/wrong_path/expected.err
vendored
Normal file
@@ -0,0 +1,6 @@
the path "wrong" doesn't exist
the path "wrong" doesn't exist
the path "wrong" doesn't exist
the timestamp path "wrong" returned NULL
the path "wrong" doesn't exist
the path "wrong" doesn't exist
0
plugins/parsers/json_v2/testdata/wrong_path/expected.out
vendored
Normal file
3
plugins/parsers/json_v2/testdata/wrong_path/input.json
vendored
Normal file
@@ -0,0 +1,3 @@
{
    "correct": "test"
}
46
plugins/parsers/json_v2/testdata/wrong_path/telegraf.conf
vendored
Normal file
@@ -0,0 +1,46 @@
# Example taken from: https://github.com/influxdata/telegraf/issues/7097

[[inputs.file]]
    files = ["./testdata/wrong_path/input.json"]
    data_format = "json_v2"
    [[inputs.file.json_v2]]
        [[inputs.file.json_v2.object]]
            path = "wrong"

[[inputs.file]]
    files = ["./testdata/wrong_path/input.json"]
    data_format = "json_v2"
    [[inputs.file.json_v2]]
        [[inputs.file.json_v2.object]]
            path = "correct"
            [[inputs.file.json_v2.object.tag]]
                path = "wrong"

[[inputs.file]]
    files = ["./testdata/wrong_path/input.json"]
    data_format = "json_v2"
    [[inputs.file.json_v2]]
        [[inputs.file.json_v2.object]]
            path = "correct"
            [[inputs.file.json_v2.object.field]]
                path = "wrong"

[[inputs.file]]
    files = ["./testdata/wrong_path/input.json"]
    data_format = "json_v2"
    [[inputs.file.json_v2]]
        timestamp_path = "wrong"

[[inputs.file]]
    files = ["./testdata/wrong_path/input.json"]
    data_format = "json_v2"
    [[inputs.file.json_v2]]
        [[inputs.file.json_v2.tag]]
            path = "wrong"

[[inputs.file]]
    files = ["./testdata/wrong_path/input.json"]
    data_format = "json_v2"
    [[inputs.file.json_v2]]
        [[inputs.file.json_v2.field]]
            path = "wrong"
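All six errors in expected.err trace back to GJSON queries that match nothing in input.json. A minimal sketch against the gjson library the parser's path syntax comes from (the error wording itself is the parser's, not gjson's):

```go
package main

import (
	"fmt"

	"github.com/tidwall/gjson"
)

func main() {
	input := `{"correct": "test"}`

	// A query for a key that is present yields an existing result...
	fmt.Println(gjson.Get(input, "correct").Exists()) // true
	// ...while a query for a missing key does not, which the parser
	// reports as `the path "wrong" doesn't exist`.
	fmt.Println(gjson.Get(input, "wrong").Exists()) // false
}
```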