1
0
Fork 0

Adding upstream version 1.34.4.

Signed-off-by: Daniel Baumann <daniel@debian.org>
This commit is contained in:
Daniel Baumann 2025-05-24 07:26:29 +02:00
parent e393c3af3f
commit 4978089aab
Signed by: daniel
GPG key ID: FBB4F0E80A80222F
4963 changed files with 677545 additions and 0 deletions

View file

@ -0,0 +1,69 @@
# CSV Serializer
The `csv` output data format converts metrics into CSV lines.
## Configuration
```toml
[[outputs.file]]
## Files to write to, "stdout" is a specially handled file.
files = ["stdout", "/tmp/metrics.out"]
## Data format to output.
## Each data format has its own unique set of configuration options, read
## more about them here:
## https://github.com/influxdata/telegraf/blob/master/docs/DATA_FORMATS_OUTPUT.md
data_format = "csv"
## The default timestamp format is Unix epoch time.
# Other timestamp layouts can be configured using the Go language time
# layout specification from https://golang.org/pkg/time/#Time.Format
# e.g.: csv_timestamp_format = "2006-01-02T15:04:05Z07:00"
# csv_timestamp_format = "unix"
## The default separator for the CSV format.
# csv_separator = ","
## Output the CSV header in the first line.
## Enable the header when outputting metrics to a new file.
## Disable when appending to a file or when using a stateless
## output to prevent headers appearing between data lines.
# csv_header = false
## Prefix tag and field columns with "tag_" and "field_" respectively.
## This can be helpful if you need to know the "type" of a column.
# csv_column_prefix = false
## Use the specified order for the columns.
## This can be helpful if you need a specific output order. To specify tags,
## use a `tag.` prefix, for fields use a `field.` prefix and use `name` and
## `timestamp` to reference the measurement name and timestamp respectively.
## NOTE: The output will only contain the specified tags, fields, etc. All
## other data will be dropped. In case a tag or field does not exist,
## the column will be empty.
## ex. csv_columns = ["timestamp", "tag.host", "field.value"]
##
## By default all metric data will be written in the order:
## timestamp, name, tags..., fields...
## with tags and fields being ordered alphabetically.
# csv_columns = []
```
## Examples
Standard form:
```csv
1458229140,docker,raynor,30,4,...,59,660
```
When an output plugin needs to emit multiple metrics at one time, it may use
the batch format. The use of batch format is determined by the plugin,
reference the documentation for the specific plugin. With `csv_header = true`
you get
```csv
timestamp,measurement,host,field_1,field_2,...,field_N,n_images
1458229140,docker,raynor,30,4,...,59,660
1458229143,docker,raynor,28,5,...,60,665
```

View file

@ -0,0 +1,245 @@
package csv
import (
"bytes"
"encoding/csv"
"fmt"
"runtime"
"sort"
"strconv"
"strings"
"time"
"unicode/utf8"
"github.com/influxdata/telegraf"
"github.com/influxdata/telegraf/internal"
"github.com/influxdata/telegraf/plugins/serializers"
)
// Serializer converts Telegraf metrics into CSV lines.
// Init must be called before the first Serialize/SerializeBatch call.
type Serializer struct {
	// TimestampFormat selects the time column representation: one of the
	// "unix"/"unix_ms"/"unix_us"/"unix_ns" epoch formats or a Go
	// reference-time layout string. Empty defaults to "unix" in Init.
	TimestampFormat string `toml:"csv_timestamp_format"`
	// Separator is the CSV delimiter; empty defaults to "," in Init.
	Separator string `toml:"csv_separator"`
	// Header controls whether a header line is written before the first
	// data line; it is cleared after the header has been emitted once.
	Header bool `toml:"csv_header"`
	// Prefix enables "tag_"/"field_" prefixes on header column names.
	Prefix bool `toml:"csv_column_prefix"`
	// Columns optionally fixes the output column order using
	// "timestamp", "name", "tag.<key>" and "field.<key>" references.
	Columns []string `toml:"csv_columns"`

	// buffer is reused across calls to collect the serialized output.
	buffer bytes.Buffer
	// writer is the csv.Writer writing into buffer; set up by Init.
	writer *csv.Writer
}
// Init applies defaults, validates the configuration and prepares the
// internal CSV writer. It must be called before serializing metrics.
func (s *Serializer) Init() error {
	// Setting defaults
	if s.Separator == "" {
		s.Separator = ","
	}

	// The CSV delimiter must be exactly one rune. Count runes rather
	// than bytes so a single multi-byte character (e.g. "→") is accepted.
	if utf8.RuneCountInString(s.Separator) > 1 {
		return fmt.Errorf("invalid separator %q", s.Separator)
	}

	switch s.TimestampFormat {
	case "":
		s.TimestampFormat = "unix"
	case "unix", "unix_ms", "unix_us", "unix_ns":
		// valid epoch-style formats, nothing to do
	default:
		// A valid Go reference layout contains at least one reference
		// element; if formatting the current time reproduces the layout
		// verbatim, the string cannot be a usable layout.
		if time.Now().Format(s.TimestampFormat) == s.TimestampFormat {
			return fmt.Errorf("invalid timestamp format %q", s.TimestampFormat)
		}
	}

	// Validate the column references if an explicit order was requested.
	for _, name := range s.Columns {
		switch {
		case name == "timestamp", name == "name",
			strings.HasPrefix(name, "tag."),
			strings.HasPrefix(name, "field."):
			// valid reference
		default:
			return fmt.Errorf("invalid column reference %q", name)
		}
	}

	// Initialize the writer targeting the reusable output buffer.
	s.writer = csv.NewWriter(&s.buffer)
	// Size can be ignored: the separator is exactly one rune here.
	s.writer.Comma, _ = utf8.DecodeRuneInString(s.Separator)
	s.writer.UseCRLF = runtime.GOOS == "windows"

	return nil
}
// Serialize produces the CSV representation of a single metric by
// delegating to SerializeBatch with a one-element batch.
func (s *Serializer) Serialize(metric telegraf.Metric) ([]byte, error) {
	batch := []telegraf.Metric{metric}
	return s.SerializeBatch(batch)
}
// SerializeBatch converts a batch of metrics into a single CSV payload.
// If the header is enabled it is written once (derived from the first
// metric unless an explicit column order is configured) and the Header
// flag is cleared so subsequent batches contain only data rows.
// An empty batch yields a nil payload and no error.
func (s *Serializer) SerializeBatch(metrics []telegraf.Metric) ([]byte, error) {
	if len(metrics) < 1 {
		return nil, nil
	}

	// Start from an empty buffer; the buffer (and its capacity) is
	// reused across calls to avoid re-allocations.
	s.buffer.Reset()

	// Write the header if the user wants us to
	if s.Header {
		var err error
		if len(s.Columns) > 0 {
			err = s.writeHeaderOrdered()
		} else {
			// Derive tag/field columns from the first metric of the batch.
			err = s.writeHeader(metrics[0])
		}
		if err != nil {
			return nil, fmt.Errorf("writing header failed: %w", err)
		}
		// Emit the header only once per serializer instance.
		s.Header = false
	}

	for _, m := range metrics {
		var err error
		if len(s.Columns) > 0 {
			err = s.writeDataOrdered(m)
		} else {
			err = s.writeData(m)
		}
		if err != nil {
			return nil, fmt.Errorf("writing data failed: %w", err)
		}
	}

	// Flush the csv writer and surface any buffered write error, as
	// recommended by the encoding/csv documentation.
	s.writer.Flush()
	if err := s.writer.Error(); err != nil {
		return nil, fmt.Errorf("flushing writer failed: %w", err)
	}

	return s.buffer.Bytes(), nil
}
// writeHeader emits the header line for the default (non-ordered)
// layout: timestamp, measurement, tags (in list order) and fields
// sorted alphabetically by key. With Prefix enabled, tag and field
// columns are prefixed with "tag_" and "field_" respectively.
func (s *Serializer) writeHeader(metric telegraf.Metric) error {
	columns := []string{
		"timestamp",
		"measurement",
	}

	for _, tag := range metric.TagList() {
		if s.Prefix {
			columns = append(columns, "tag_"+tag.Key)
		} else {
			columns = append(columns, tag.Key)
		}
	}

	// Sort the fields in place by key so header and data rows agree on
	// the column order. Hoist the list to avoid re-fetching it on every
	// comparison inside the sort closure.
	fields := metric.FieldList()
	sort.Slice(fields, func(i, j int) bool {
		return fields[i].Key < fields[j].Key
	})
	for _, field := range fields {
		if s.Prefix {
			columns = append(columns, "field_"+field.Key)
		} else {
			columns = append(columns, field.Key)
		}
	}

	return s.writer.Write(columns)
}
// writeHeaderOrdered emits the header line when an explicit column
// order is configured. With Prefix enabled the "tag."/"field." dot is
// replaced by an underscore (e.g. "tag_machine"); otherwise the prefix
// is stripped so only the bare key remains.
func (s *Serializer) writeHeaderOrdered() error {
	columns := make([]string, 0, len(s.Columns))
	for _, column := range s.Columns {
		var label string
		if s.Prefix {
			label = strings.ReplaceAll(column, ".", "_")
		} else {
			label = strings.TrimPrefix(strings.TrimPrefix(column, "tag."), "field.")
		}
		columns = append(columns, label)
	}
	return s.writer.Write(columns)
}
// formatTimestamp renders a metric time according to the configured
// timestamp format. The "unix*" variants produce epoch integers at the
// respective resolution; any other value is interpreted as a Go
// reference-time layout and formatted in UTC.
func (s *Serializer) formatTimestamp(tm time.Time) string {
	switch s.TimestampFormat {
	case "unix":
		return strconv.FormatInt(tm.Unix(), 10)
	case "unix_ms":
		return strconv.FormatInt(tm.UnixMilli(), 10)
	case "unix_us":
		return strconv.FormatInt(tm.UnixMicro(), 10)
	case "unix_ns":
		return strconv.FormatInt(tm.UnixNano(), 10)
	default:
		return tm.UTC().Format(s.TimestampFormat)
	}
}

// writeData emits one data row in the default (non-ordered) layout:
// timestamp, measurement name, tag values (in list order) and field
// values sorted by key to match the header produced by writeHeader.
func (s *Serializer) writeData(metric telegraf.Metric) error {
	columns := []string{
		s.formatTimestamp(metric.Time()),
		metric.Name(),
	}

	for _, tag := range metric.TagList() {
		columns = append(columns, tag.Value)
	}

	// Sort the fields in place by key for a stable column order.
	fields := metric.FieldList()
	sort.Slice(fields, func(i, j int) bool {
		return fields[i].Key < fields[j].Key
	})
	for _, field := range fields {
		v, err := internal.ToString(field.Value)
		if err != nil {
			return fmt.Errorf("converting field %q to string failed: %w", field.Key, err)
		}
		columns = append(columns, v)
	}

	return s.writer.Write(columns)
}
// writeDataOrdered emits one data row following the user-configured
// column order. Referenced tags or fields that are missing from the
// metric produce empty cells; unknown references were already rejected
// by Init and are silently skipped here.
func (s *Serializer) writeDataOrdered(metric telegraf.Metric) error {
	// Render the timestamp according to the configured format.
	var timestamp string
	switch s.TimestampFormat {
	case "unix":
		timestamp = strconv.FormatInt(metric.Time().Unix(), 10)
	case "unix_ms":
		timestamp = strconv.FormatInt(metric.Time().UnixNano()/1_000_000, 10)
	case "unix_us":
		timestamp = strconv.FormatInt(metric.Time().UnixNano()/1_000, 10)
	case "unix_ns":
		timestamp = strconv.FormatInt(metric.Time().UnixNano(), 10)
	default:
		timestamp = metric.Time().UTC().Format(s.TimestampFormat)
	}

	columns := make([]string, 0, len(s.Columns))
	for _, column := range s.Columns {
		switch {
		case column == "timestamp":
			columns = append(columns, timestamp)
		case column == "name":
			columns = append(columns, metric.Name())
		case strings.HasPrefix(column, "tag."):
			// A missing tag yields the empty string.
			value, _ := metric.GetTag(strings.TrimPrefix(column, "tag."))
			columns = append(columns, value)
		case strings.HasPrefix(column, "field."):
			key := strings.TrimPrefix(column, "field.")
			value := ""
			if raw, found := metric.GetField(key); found {
				converted, err := internal.ToString(raw)
				if err != nil {
					return fmt.Errorf("converting field %q to string failed: %w", key, err)
				}
				value = converted
			}
			columns = append(columns, value)
		}
	}

	return s.writer.Write(columns)
}
// init registers this serializer under the "csv" data format name so it
// can be selected via `data_format = "csv"` in output plugin configs.
func init() {
	serializers.Add("csv",
		func() telegraf.Serializer {
			return &Serializer{}
		},
	)
}

View file

@ -0,0 +1,261 @@
package csv
import (
"bytes"
"os"
"path/filepath"
"strings"
"testing"
"github.com/influxdata/toml"
"github.com/stretchr/testify/require"
"github.com/influxdata/telegraf/plugins/parsers/influx"
"github.com/influxdata/telegraf/plugins/serializers"
"github.com/influxdata/telegraf/testutil"
)
// TestInvalidTimestampFormat ensures Init rejects a timestamp layout
// that is neither a unix variant nor a valid Go reference layout.
func TestInvalidTimestampFormat(t *testing.T) {
	serializer := Serializer{TimestampFormat: "garbage"}
	require.EqualError(t, serializer.Init(), `invalid timestamp format "garbage"`)
}
// TestInvalidSeparator checks both separator failure modes: a
// multi-character separator is rejected by Init, while a single-rune
// separator that encoding/csv forbids (newline) passes Init but fails
// when actually serializing data.
func TestInvalidSeparator(t *testing.T) {
	serializer := Serializer{Separator: "garbage"}
	require.EqualError(t, serializer.Init(), `invalid separator "garbage"`)

	serializer = Serializer{Separator: "\n"}
	require.NoError(t, serializer.Init())

	_, err := serializer.Serialize(testutil.TestMetric(42.3, "test"))
	require.EqualError(t, err, "writing data failed: csv: invalid field or comment delimiter")
}
// TestSerializeTransformationNonBatch runs the testcases/ scenarios by
// serializing each input metric individually (one Serialize call per
// metric) and comparing the concatenated output with the expected file
// referenced in the testcase's comment header.
func TestSerializeTransformationNonBatch(t *testing.T) {
	var tests = []struct {
		name     string
		filename string
	}{
		{
			name:     "basic",
			filename: "testcases/basic.conf",
		},
		{
			name:     "unix nanoseconds timestamp",
			filename: "testcases/nanoseconds.conf",
		},
		{
			name:     "header",
			filename: "testcases/header.conf",
		},
		{
			name:     "header with prefix",
			filename: "testcases/prefix.conf",
		},
		{
			name:     "header and RFC3339 timestamp",
			filename: "testcases/rfc3339.conf",
		},
		{
			name:     "header and semicolon",
			filename: "testcases/semicolon.conf",
		},
		{
			name:     "ordered without header",
			filename: "testcases/ordered.conf",
		},
		{
			name:     "ordered with header",
			filename: "testcases/ordered_with_header.conf",
		},
		{
			name:     "ordered with header and prefix",
			filename: "testcases/ordered_with_header_prefix.conf",
		},
		{
			name:     "ordered non-existing fields and tags",
			filename: "testcases/ordered_not_exist.conf",
		},
	}
	parser := &influx.Parser{}
	require.NoError(t, parser.Init())
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			filename := filepath.FromSlash(tt.filename)
			cfg, header, err := loadTestConfiguration(filename)
			require.NoError(t, err)
			// Get the input metrics from the "Input:" section of the
			// testcase's comment header (line protocol).
			metrics, err := testutil.ParseMetricsFrom(header, "Input:", parser)
			require.NoError(t, err)
			// Get the expectations: the "Output File:" section names the
			// file holding the expected CSV output.
			expectedFn, err := testutil.ParseRawLinesFrom(header, "Output File:")
			require.NoError(t, err)
			require.Len(t, expectedFn, 1, "only a single output file is supported")
			expected, err := loadCSV(expectedFn[0])
			require.NoError(t, err)
			// Serialize using the options from the testcase configuration
			serializer := Serializer{
				TimestampFormat: cfg.TimestampFormat,
				Separator:       cfg.Separator,
				Header:          cfg.Header,
				Prefix:          cfg.Prefix,
				Columns:         cfg.Columns,
			}
			require.NoError(t, serializer.Init())
			// expected results use LF endings regardless of platform
			serializer.writer.UseCRLF = false
			// Serialize one metric at a time and concatenate the results
			var actual bytes.Buffer
			for _, m := range metrics {
				buf, err := serializer.Serialize(m)
				require.NoError(t, err)
				_, err = actual.ReadFrom(bytes.NewReader(buf))
				require.NoError(t, err)
			}
			// Compare
			require.EqualValues(t, string(expected), actual.String())
		})
	}
}
// TestSerializeTransformationBatch runs the same testcases/ scenarios
// as the non-batch variant, but serializes all input metrics with a
// single SerializeBatch call and compares the result to the expected
// file referenced in the testcase's comment header.
func TestSerializeTransformationBatch(t *testing.T) {
	var tests = []struct {
		name     string
		filename string
	}{
		{
			name:     "basic",
			filename: "testcases/basic.conf",
		},
		{
			name:     "unix nanoseconds timestamp",
			filename: "testcases/nanoseconds.conf",
		},
		{
			name:     "header",
			filename: "testcases/header.conf",
		},
		{
			name:     "header with prefix",
			filename: "testcases/prefix.conf",
		},
		{
			name:     "header and RFC3339 timestamp",
			filename: "testcases/rfc3339.conf",
		},
		{
			name:     "header and semicolon",
			filename: "testcases/semicolon.conf",
		},
		{
			name:     "ordered without header",
			filename: "testcases/ordered.conf",
		},
		{
			name:     "ordered with header",
			filename: "testcases/ordered_with_header.conf",
		},
		{
			name:     "ordered with header and prefix",
			filename: "testcases/ordered_with_header_prefix.conf",
		},
		{
			name:     "ordered non-existing fields and tags",
			filename: "testcases/ordered_not_exist.conf",
		},
	}
	parser := &influx.Parser{}
	require.NoError(t, parser.Init())
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			filename := filepath.FromSlash(tt.filename)
			cfg, header, err := loadTestConfiguration(filename)
			require.NoError(t, err)
			// Get the input metrics from the "Input:" section of the
			// testcase's comment header (line protocol).
			metrics, err := testutil.ParseMetricsFrom(header, "Input:", parser)
			require.NoError(t, err)
			// Get the expectations: the "Output File:" section names the
			// file holding the expected CSV output.
			expectedFn, err := testutil.ParseRawLinesFrom(header, "Output File:")
			require.NoError(t, err)
			require.Len(t, expectedFn, 1, "only a single output file is supported")
			expected, err := loadCSV(expectedFn[0])
			require.NoError(t, err)
			// Serialize using the options from the testcase configuration
			serializer := Serializer{
				TimestampFormat: cfg.TimestampFormat,
				Separator:       cfg.Separator,
				Header:          cfg.Header,
				Prefix:          cfg.Prefix,
				Columns:         cfg.Columns,
			}
			require.NoError(t, serializer.Init())
			// expected results use LF endings regardless of platform
			serializer.writer.UseCRLF = false
			actual, err := serializer.SerializeBatch(metrics)
			require.NoError(t, err)
			// Compare
			require.EqualValues(t, string(expected), string(actual))
		})
	}
}
// Config mirrors Serializer so testcase files can be unmarshalled via
// the same toml tags without carrying over the serializer's methods.
type Config Serializer
// loadTestConfiguration reads a testcase file and returns the decoded
// TOML options together with all comment lines, which carry the input
// metrics and output expectations for the test.
func loadTestConfiguration(filename string) (*Config, []string, error) {
	buf, err := os.ReadFile(filename)
	if err != nil {
		return nil, nil, err
	}

	// Collect the comment lines forming the testcase header.
	header := make([]string, 0)
	for _, raw := range strings.Split(string(buf), "\n") {
		trimmed := strings.TrimSpace(raw)
		if strings.HasPrefix(trimmed, "#") {
			header = append(header, trimmed)
		}
	}

	// Decode the non-comment part as TOML configuration.
	var cfg Config
	if err := toml.Unmarshal(buf, &cfg); err != nil {
		return &cfg, header, err
	}
	return &cfg, header, nil
}
// loadCSV returns the raw contents of the expected-output CSV file.
func loadCSV(filename string) ([]byte, error) {
	return os.ReadFile(filename)
}
// BenchmarkSerialize measures single-metric serialization throughput
// over the standard benchmark metric set.
func BenchmarkSerialize(b *testing.B) {
	serializer := &Serializer{}
	require.NoError(b, serializer.Init())
	metrics := serializers.BenchmarkMetrics(b)
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		// Cycle through the benchmark metrics.
		_, err := serializer.Serialize(metrics[i%len(metrics)])
		require.NoError(b, err)
	}
}
// BenchmarkSerializeBatch measures batch serialization throughput over
// the standard benchmark metric set.
func BenchmarkSerializeBatch(b *testing.B) {
	serializer := &Serializer{}
	require.NoError(b, serializer.Init())
	// Slice the benchmark metrics once up front for SerializeBatch.
	m := serializers.BenchmarkMetrics(b)
	batch := m[:]
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		_, err := serializer.SerializeBatch(batch)
		require.NoError(b, err)
	}
}

View file

@ -0,0 +1,8 @@
# Example for outputting CSV
#
# Output File:
# testcases/basic.csv
#
# Input:
# impression,flagname=F5,host=1cbbb3796fc2,key=12345,platform=Java,sdkver=4.9.1,value=false count_sum=5i 1653643420000000000
# expression,flagname=E42,host=klaus,key=67890,platform=Golang,sdkver=1.18.3,value=true count_sum=42i 1653646789000000000

View file

@ -0,0 +1,2 @@
1653643420,impression,F5,1cbbb3796fc2,12345,Java,4.9.1,false,5
1653646789,expression,E42,klaus,67890,Golang,1.18.3,true,42
1 1653643420 impression F5 1cbbb3796fc2 12345 Java 4.9.1 false 5
2 1653646789 expression E42 klaus 67890 Golang 1.18.3 true 42

View file

@ -0,0 +1,10 @@
# Example for outputting CSV in non-batch mode.
#
# Output File:
# testcases/header.csv
#
# Input:
# impression,flagname=F5,host=1cbbb3796fc2,key=12345,platform=Java,sdkver=4.9.1,value=false count_sum=5i 1653643420000000000
# expression,flagname=E42,host=klaus,key=67890,platform=Golang,sdkver=1.18.3,value=true count_sum=42i 1653646789000000000
csv_header = true

View file

@ -0,0 +1,3 @@
timestamp,measurement,flagname,host,key,platform,sdkver,value,count_sum
1653643420,impression,F5,1cbbb3796fc2,12345,Java,4.9.1,false,5
1653646789,expression,E42,klaus,67890,Golang,1.18.3,true,42
1 timestamp measurement flagname host key platform sdkver value count_sum
2 1653643420 impression F5 1cbbb3796fc2 12345 Java 4.9.1 false 5
3 1653646789 expression E42 klaus 67890 Golang 1.18.3 true 42

View file

@ -0,0 +1,10 @@
# Example for outputting CSV
#
# Output File:
# testcases/nanoseconds.csv
#
# Input:
# impression,flagname=F5,host=1cbbb3796fc2,key=12345,platform=Java,sdkver=4.9.1,value=false count_sum=5i 1653643420123456
# expression,flagname=E42,host=klaus,key=67890,platform=Golang,sdkver=1.18.3,value=true count_sum=42i 1653646789789012
csv_timestamp_format = "unix_ns"

View file

@ -0,0 +1,2 @@
1653643420123456,impression,F5,1cbbb3796fc2,12345,Java,4.9.1,false,5
1653646789789012,expression,E42,klaus,67890,Golang,1.18.3,true,42
1 1653643420123456 impression F5 1cbbb3796fc2 12345 Java 4.9.1 false 5
2 1653646789789012 expression E42 klaus 67890 Golang 1.18.3 true 42

View file

@ -0,0 +1,11 @@
# Example for outputting CSV with a specified column order
#
# Output File:
# testcases/ordered.csv
#
# Input:
# mymetric,machine=A1,host=1cbbb3796fc2 pressure=987.5,temperature=23.7,hours=15i 1653643420000000000
# mymetric,machine=X9,host=83d2e491ca01 pressure=1022.6,temperature=39.9,hours=231i 1653646789000000000
csv_timestamp_format = "unix_ns"
csv_columns = ["timestamp", "field.temperature", "field.pressure", "tag.machine"]

View file

@ -0,0 +1,2 @@
1653643420000000000,23.7,987.5,A1
1653646789000000000,39.9,1022.6,X9
1 1653643420000000000 23.7 987.5 A1
2 1653646789000000000 39.9 1022.6 X9

View file

@ -0,0 +1,12 @@
# Example for outputting CSV with a specified column order
#
# Output File:
# testcases/ordered_not_exist.csv
#
# Input:
# mymetric,machine=A1,host=1cbbb3796fc2 pressure=987.5,temperature=23.7,hours=15i 1653643420000000000
# mymetric,machine=X9,host=83d2e491ca01 status="healthy",pressure=1022.6,temperature=39.9,hours=231i 1653646789000000000
csv_timestamp_format = "unix_ns"
csv_header = true
csv_columns = ["timestamp", "field.temperature", "field.pressure", "field.status", "tag.location", "tag.machine"]

View file

@ -0,0 +1,3 @@
timestamp,temperature,pressure,status,location,machine
1653643420000000000,23.7,987.5,,,A1
1653646789000000000,39.9,1022.6,healthy,,X9
1 timestamp temperature pressure status location machine
2 1653643420000000000 23.7 987.5 A1
3 1653646789000000000 39.9 1022.6 healthy X9

View file

@ -0,0 +1,12 @@
# Example for outputting CSV with a specified column order
#
# Output File:
# testcases/ordered_with_header.csv
#
# Input:
# mymetric,machine=A1,host=1cbbb3796fc2 pressure=987.5,temperature=23.7,hours=15i 1653643420000000000
# mymetric,machine=X9,host=83d2e491ca01 pressure=1022.6,temperature=39.9,hours=231i 1653646789000000000
csv_timestamp_format = "unix_ns"
csv_header = true
csv_columns = ["timestamp", "field.temperature", "field.pressure", "tag.machine"]

View file

@ -0,0 +1,3 @@
timestamp,temperature,pressure,machine
1653643420000000000,23.7,987.5,A1
1653646789000000000,39.9,1022.6,X9
1 timestamp temperature pressure machine
2 1653643420000000000 23.7 987.5 A1
3 1653646789000000000 39.9 1022.6 X9

View file

@ -0,0 +1,13 @@
# Example for outputting CSV with a specified column order
#
# Output File:
# testcases/ordered_with_header_prefix.csv
#
# Input:
# mymetric,machine=A1,host=1cbbb3796fc2 pressure=987.5,temperature=23.7,hours=15i 1653643420000000000
# mymetric,machine=X9,host=83d2e491ca01 pressure=1022.6,temperature=39.9,hours=231i 1653646789000000000
csv_timestamp_format = "unix_ns"
csv_header = true
csv_column_prefix = true
csv_columns = ["timestamp", "field.temperature", "field.pressure", "tag.machine"]

View file

@ -0,0 +1,3 @@
timestamp,field_temperature,field_pressure,tag_machine
1653643420000000000,23.7,987.5,A1
1653646789000000000,39.9,1022.6,X9
1 timestamp field_temperature field_pressure tag_machine
2 1653643420000000000 23.7 987.5 A1
3 1653646789000000000 39.9 1022.6 X9

View file

@ -0,0 +1,11 @@
# Example for outputting CSV in non-batch mode.
#
# Output File:
# testcases/prefix.csv
#
# Input:
# impression,flagname=F5,host=1cbbb3796fc2,key=12345,platform=Java,sdkver=4.9.1,value=false count_sum=5i 1653643420000000000
# expression,flagname=E42,host=klaus,key=67890,platform=Golang,sdkver=1.18.3,value=true count_sum=42i 1653646789000000000
csv_header = true
csv_column_prefix = true

View file

@ -0,0 +1,3 @@
timestamp,measurement,tag_flagname,tag_host,tag_key,tag_platform,tag_sdkver,tag_value,field_count_sum
1653643420,impression,F5,1cbbb3796fc2,12345,Java,4.9.1,false,5
1653646789,expression,E42,klaus,67890,Golang,1.18.3,true,42
1 timestamp measurement tag_flagname tag_host tag_key tag_platform tag_sdkver tag_value field_count_sum
2 1653643420 impression F5 1cbbb3796fc2 12345 Java 4.9.1 false 5
3 1653646789 expression E42 klaus 67890 Golang 1.18.3 true 42

View file

@ -0,0 +1,11 @@
# Example for outputting CSV in non-batch mode.
#
# Output File:
# testcases/rfc3339.csv
#
# Input:
# impression,flagname=F5,host=1cbbb3796fc2,key=12345,platform=Java,sdkver=4.9.1,value=false count_sum=5i 1653643420000000000
# expression,flagname=E42,host=klaus,key=67890,platform=Golang,sdkver=1.18.3,value=true count_sum=42i 1653646789000000000
csv_timestamp_format = "2006-01-02T15:04:05Z07:00"
csv_header = true

View file

@ -0,0 +1,3 @@
timestamp,measurement,flagname,host,key,platform,sdkver,value,count_sum
2022-05-27T09:23:40Z,impression,F5,1cbbb3796fc2,12345,Java,4.9.1,false,5
2022-05-27T10:19:49Z,expression,E42,klaus,67890,Golang,1.18.3,true,42
1 timestamp measurement flagname host key platform sdkver value count_sum
2 2022-05-27T09:23:40Z impression F5 1cbbb3796fc2 12345 Java 4.9.1 false 5
3 2022-05-27T10:19:49Z expression E42 klaus 67890 Golang 1.18.3 true 42

View file

@ -0,0 +1,11 @@
# Example for outputting CSV in non-batch mode.
#
# Output File:
# testcases/semicolon.csv
#
# Input:
# impression,flagname=F5,host=1cbbb3796fc2,key=12345,platform=Java,sdkver=4.9.1,value=false count_sum=5i 1653643420000000000
# expression,flagname=E42,host=klaus,key=67890,platform=Golang,sdkver=1.18.3,value=true count_sum=42i 1653646789000000000
csv_separator = ";"
csv_header = true

View file

@ -0,0 +1,3 @@
timestamp;measurement;flagname;host;key;platform;sdkver;value;count_sum
1653643420;impression;F5;1cbbb3796fc2;12345;Java;4.9.1;false;5
1653646789;expression;E42;klaus;67890;Golang;1.18.3;true;42
1 timestamp measurement flagname host key platform sdkver value count_sum
2 1653643420 impression F5 1cbbb3796fc2 12345 Java 4.9.1 false 5
3 1653646789 expression E42 klaus 67890 Golang 1.18.3 true 42