Adding upstream version 1.34.4.

Signed-off-by: Daniel Baumann <daniel@debian.org>
Author: Daniel Baumann
Date:   2025-05-24 07:26:29 +02:00
Commit: 4978089aab (parent: e393c3af3f)
Signed by: daniel (GPG key ID: FBB4F0E80A80222F)

4963 changed files with 677545 additions and 0 deletions


@@ -0,0 +1,58 @@
# Collectd Parser Plugin

The collectd format parses the collectd binary network protocol. Tags are
created for host, instance, type, and type instance. All collectd values are
added as float64 fields.

For more information about the binary network protocol see
[here](https://collectd.org/wiki/index.php/Binary_protocol).

You can control the cryptographic settings with parser options. Create an
authentication file and set `collectd_auth_file` to the path of the file, then
set the desired security level in `collectd_security_level`.

Additional information, including client setup, can be found [here][1].

You can also change the path to the typesdb or add additional typesdb files
using `collectd_typesdb`.

[1]: https://collectd.org/wiki/index.php/Networking_introduction#Cryptographic_setup
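
The auth file itself uses collectd's plain `username: password` format, one
entry per line; the same format is used by the `testdata/authfile` fixture in
this plugin's tests. A minimal example:

```text
user0: bar
```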

## Configuration

```toml
[[inputs.socket_listener]]
  service_address = "udp://:25826"

  ## Data format to consume.
  ## Each data format has its own unique set of configuration options, read
  ## more about them here:
  ## https://github.com/influxdata/telegraf/blob/master/docs/DATA_FORMATS_INPUT.md
  data_format = "collectd"

  ## Authentication file for cryptographic security levels
  collectd_auth_file = "/etc/collectd/auth_file"

  ## One of none (default), sign, or encrypt
  collectd_security_level = "encrypt"

  ## Path to TypesDB specifications
  collectd_typesdb = ["/usr/share/collectd/types.db"]

  ## Multi-value plugins can be handled in two ways:
  ## "split" parses and stores the multi-value plugin data as separate measurements,
  ## "join" parses and stores the multi-value plugin as a single multi-value measurement.
  ## "split" is the default behavior for backward compatibility with previous versions of InfluxDB.
  collectd_parse_multivalue = "split"
```
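
To illustrate the difference between the two modes, assume a multi-value type
such as collectd's `load` (data sources `shortterm`, `midterm` and `longterm`
in `types.db`), with `collectd_typesdb` pointing at a types.db that defines it.
The host name and values below are made up: `split` yields one measurement per
data source, while `join` yields a single measurement with one field per data
source.

```text
# collectd_parse_multivalue = "split"
load_shortterm,host=example,type=load value=0.5 1560455990829955922
load_midterm,host=example,type=load value=0.4 1560455990829955922
load_longterm,host=example,type=load value=0.3 1560455990829955922

# collectd_parse_multivalue = "join"
load,host=example,type=load shortterm=0.5,midterm=0.4,longterm=0.3 1560455990829955922
```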

## Example Output

```text
memory,type=memory,type_instance=buffered value=2520051712 1560455990829955922
memory,type=memory,type_instance=used value=3710791680 1560455990829955922
memory,type=memory,type_instance=buffered value=2520047616 1560455980830417318
memory,type=memory,type_instance=cached value=9472626688 1560455980830417318
memory,type=memory,type_instance=slab_recl value=2088894464 1560455980830417318
memory,type=memory,type_instance=slab_unrecl value=146984960 1560455980830417318
memory,type=memory,type_instance=free value=2978258944 1560455980830417318
memory,type=memory,type_instance=used value=3707047936 1560455980830417318
```
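
Beyond the Telegraf configuration above, the parser can also be driven directly
from Go, mirroring how its unit tests exercise it. The sketch below is only an
illustration: the import path and the `packet.bin` input file are assumptions,
not something this README documents.

```go
package main

import (
	"fmt"
	"os"

	"github.com/influxdata/telegraf/plugins/parsers/collectd"
)

func main() {
	// A raw collectd binary-protocol packet, e.g. captured from UDP port 25826.
	packet, err := os.ReadFile("packet.bin")
	if err != nil {
		panic(err)
	}

	// Configure and initialize the parser (security level "none" by default).
	parser := &collectd.Parser{ParseMultiValue: "split"}
	if err := parser.Init(); err != nil {
		panic(err)
	}

	// Parse the packet into Telegraf metrics and print them.
	metrics, err := parser.Parse(packet)
	if err != nil {
		panic(err)
	}
	for _, m := range metrics {
		fmt.Println(m.Name(), m.Tags(), m.Fields())
	}
}
```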


@@ -0,0 +1,208 @@
package collectd

import (
	"errors"
	"fmt"
	"os"

	"collectd.org/api"
	"collectd.org/network"

	"github.com/influxdata/telegraf"
	"github.com/influxdata/telegraf/metric"
	"github.com/influxdata/telegraf/plugins/parsers"
)

const (
	DefaultAuthFile = "/etc/collectd/auth_file"
)

type Parser struct {
	DefaultTags map[string]string `toml:"-"`

	// whether or not to split multi value metric into multiple metrics
	// default value is split
	ParseMultiValue string `toml:"collectd_parse_multivalue"`

	popts         network.ParseOpts
	AuthFile      string          `toml:"collectd_auth_file"`
	SecurityLevel string          `toml:"collectd_security_level"`
	TypesDB       []string        `toml:"collectd_typesdb"`
	Log           telegraf.Logger `toml:"-"`
}

// Init translates the configured security level, auth file and TypesDB paths
// into the parse options used by the collectd network library.
func (p *Parser) Init() error {
	switch p.SecurityLevel {
	case "none":
		p.popts.SecurityLevel = network.None
	case "sign":
		p.popts.SecurityLevel = network.Sign
	case "encrypt":
		p.popts.SecurityLevel = network.Encrypt
	default:
		p.popts.SecurityLevel = network.None
	}

	if p.AuthFile == "" {
		p.AuthFile = DefaultAuthFile
	}
	p.popts.PasswordLookup = network.NewAuthFile(p.AuthFile)

	for _, path := range p.TypesDB {
		db, err := LoadTypesDB(path)
		if err != nil {
			return err
		}
		if p.popts.TypesDB != nil {
			p.popts.TypesDB.Merge(db)
		} else {
			p.popts.TypesDB = db
		}
	}

	return nil
}

func (p *Parser) Parse(buf []byte) ([]telegraf.Metric, error) {
	valueLists, err := network.Parse(buf, p.popts)
	if err != nil {
		return nil, fmt.Errorf("collectd parser error: %w", err)
	}

	metrics := make([]telegraf.Metric, 0, len(valueLists))
	for _, valueList := range valueLists {
		metrics = append(metrics, p.unmarshalValueList(valueList)...)
	}

	if len(p.DefaultTags) > 0 {
		for _, m := range metrics {
			for k, v := range p.DefaultTags {
				// only set the default tag if it doesn't already exist:
				if !m.HasTag(k) {
					m.AddTag(k, v)
				}
			}
		}
	}

	return metrics, nil
}

func (p *Parser) ParseLine(line string) (telegraf.Metric, error) {
	metrics, err := p.Parse([]byte(line))
	if err != nil {
		return nil, err
	}

	if len(metrics) != 1 {
		return nil, errors.New("line contains multiple metrics")
	}

	return metrics[0], nil
}

func (p *Parser) SetDefaultTags(tags map[string]string) {
	p.DefaultTags = tags
}

// unmarshalValueList translates a ValueList into one or more Telegraf metrics,
// depending on the configured multi-value handling ("split" or "join").
func (p *Parser) unmarshalValueList(vl *api.ValueList) []telegraf.Metric {
	timestamp := vl.Time.UTC()

	var metrics []telegraf.Metric

	var multiValue = p.ParseMultiValue
	// set multiValue to default "split" if nothing is specified
	if multiValue == "" {
		multiValue = "split"
	}
	switch multiValue {
	case "split":
		for i := range vl.Values {
			name := fmt.Sprintf("%s_%s", vl.Identifier.Plugin, vl.DSName(i))
			tags := make(map[string]string)
			fields := make(map[string]interface{})

			// Convert interface back to actual type, then to float64
			switch value := vl.Values[i].(type) {
			case api.Gauge:
				fields["value"] = float64(value)
			case api.Derive:
				fields["value"] = float64(value)
			case api.Counter:
				fields["value"] = float64(value)
			}

			if vl.Identifier.Host != "" {
				tags["host"] = vl.Identifier.Host
			}
			if vl.Identifier.PluginInstance != "" {
				tags["instance"] = vl.Identifier.PluginInstance
			}
			if vl.Identifier.Type != "" {
				tags["type"] = vl.Identifier.Type
			}
			if vl.Identifier.TypeInstance != "" {
				tags["type_instance"] = vl.Identifier.TypeInstance
			}

			m := metric.New(name, tags, fields, timestamp)

			metrics = append(metrics, m)
		}
	case "join":
		name := vl.Identifier.Plugin
		tags := make(map[string]string)
		fields := make(map[string]interface{})

		for i := range vl.Values {
			switch value := vl.Values[i].(type) {
			case api.Gauge:
				fields[vl.DSName(i)] = float64(value)
			case api.Derive:
				fields[vl.DSName(i)] = float64(value)
			case api.Counter:
				fields[vl.DSName(i)] = float64(value)
			}

			if vl.Identifier.Host != "" {
				tags["host"] = vl.Identifier.Host
			}
			if vl.Identifier.PluginInstance != "" {
				tags["instance"] = vl.Identifier.PluginInstance
			}
			if vl.Identifier.Type != "" {
				tags["type"] = vl.Identifier.Type
			}
			if vl.Identifier.TypeInstance != "" {
				tags["type_instance"] = vl.Identifier.TypeInstance
			}
		}

		m := metric.New(name, tags, fields, timestamp)

		metrics = append(metrics, m)
	default:
		p.Log.Info("parse-multi-value config can only be 'split' or 'join'")
	}
	return metrics
}

// LoadTypesDB opens a collectd types.db file and parses it into a TypesDB.
func LoadTypesDB(path string) (*api.TypesDB, error) {
	reader, err := os.Open(path)
	if err != nil {
		return nil, err
	}
	return api.NewTypesDB(reader)
}

// init registers the parser with Telegraf under the "collectd" data format.
func init() {
	parsers.Add("collectd",
		func(string) telegraf.Parser {
			return &Parser{
				AuthFile: DefaultAuthFile,
			}
		})
}


@@ -0,0 +1,410 @@
package collectd

import (
	"context"
	"testing"
	"time"

	"collectd.org/api"
	"collectd.org/network"
	"github.com/stretchr/testify/require"

	"github.com/influxdata/telegraf"
	"github.com/influxdata/telegraf/metric"
	"github.com/influxdata/telegraf/testutil"
)

type AuthMap struct {
	Passwd map[string]string
}

func (p *AuthMap) Password(user string) (string, error) {
	return p.Passwd[user], nil
}

type metricData struct {
	name   string
	tags   map[string]string
	fields map[string]interface{}
}

type testCase struct {
	vl       []api.ValueList
	expected []metricData
}

var singleMetric = testCase{
	[]api.ValueList{
		{
			Identifier: api.Identifier{
				Host:           "xyzzy",
				Plugin:         "cpu",
				PluginInstance: "1",
				Type:           "cpu",
				TypeInstance:   "user",
			},
			Values: []api.Value{
				api.Counter(42),
			},
			DSNames: []string(nil),
		},
	},
	[]metricData{
		{
			"cpu_value",
			map[string]string{
				"type_instance": "user",
				"host":          "xyzzy",
				"instance":      "1",
				"type":          "cpu",
			},
			map[string]interface{}{
				"value": float64(42),
			},
		},
	},
}

var multiMetric = testCase{
	[]api.ValueList{
		{
			Identifier: api.Identifier{
				Host:           "xyzzy",
				Plugin:         "cpu",
				PluginInstance: "0",
				Type:           "cpu",
				TypeInstance:   "user",
			},
			Values: []api.Value{
				api.Derive(42),
				api.Gauge(42),
			},
			DSNames: []string{"t1", "t2"},
		},
	},
	[]metricData{
		{
			"cpu_0",
			map[string]string{
				"type_instance": "user",
				"host":          "xyzzy",
				"instance":      "0",
				"type":          "cpu",
			},
			map[string]interface{}{
				"value": float64(42),
			},
		},
		{
			"cpu_1",
			map[string]string{
				"type_instance": "user",
				"host":          "xyzzy",
				"instance":      "0",
				"type":          "cpu",
			},
			map[string]interface{}{
				"value": float64(42),
			},
		},
	},
}

func TestNewCollectdParser(t *testing.T) {
	parser := Parser{
		ParseMultiValue: "join",
	}
	require.NoError(t, parser.Init())
	require.Equal(t, network.None, parser.popts.SecurityLevel)
	require.NotNil(t, parser.popts.PasswordLookup)
	require.Nil(t, parser.popts.TypesDB)
}

func TestParse(t *testing.T) {
	cases := []testCase{singleMetric, multiMetric}

	for _, tc := range cases {
		buf, err := writeValueList(t.Context(), tc.vl)
		require.NoError(t, err)
		bytes, err := buf.Bytes()
		require.NoError(t, err)

		parser := &Parser{}
		require.NoError(t, parser.Init())
		metrics, err := parser.Parse(bytes)
		require.NoError(t, err)

		assertEqualMetrics(t, tc.expected, metrics)
	}
}

func TestParseMultiValueSplit(t *testing.T) {
	buf, err := writeValueList(t.Context(), multiMetric.vl)
	require.NoError(t, err)
	bytes, err := buf.Bytes()
	require.NoError(t, err)

	parser := &Parser{ParseMultiValue: "split"}
	require.NoError(t, parser.Init())
	metrics, err := parser.Parse(bytes)
	require.NoError(t, err)

	require.Len(t, metrics, 2)
}

func TestParseMultiValueJoin(t *testing.T) {
	buf, err := writeValueList(t.Context(), multiMetric.vl)
	require.NoError(t, err)
	bytes, err := buf.Bytes()
	require.NoError(t, err)

	parser := &Parser{ParseMultiValue: "join"}
	require.NoError(t, parser.Init())
	metrics, err := parser.Parse(bytes)
	require.NoError(t, err)

	require.Len(t, metrics, 1)
}

func TestParse_DefaultTags(t *testing.T) {
	buf, err := writeValueList(t.Context(), singleMetric.vl)
	require.NoError(t, err)
	bytes, err := buf.Bytes()
	require.NoError(t, err)

	parser := &Parser{}
	require.NoError(t, parser.Init())
	parser.SetDefaultTags(map[string]string{
		"foo": "bar",
	})
	require.NoError(t, err)
	metrics, err := parser.Parse(bytes)
	require.NoError(t, err)

	require.Equal(t, "bar", metrics[0].Tags()["foo"])
}

func TestParse_SignSecurityLevel(t *testing.T) {
	parser := &Parser{
		SecurityLevel: "sign",
		AuthFile:      "testdata/authfile",
	}
	require.NoError(t, parser.Init())

	// Signed data
	buf, err := writeValueList(t.Context(), singleMetric.vl)
	require.NoError(t, err)
	buf.Sign("user0", "bar")
	bytes, err := buf.Bytes()
	require.NoError(t, err)

	metrics, err := parser.Parse(bytes)
	require.NoError(t, err)
	assertEqualMetrics(t, singleMetric.expected, metrics)

	// Encrypted data
	buf, err = writeValueList(t.Context(), singleMetric.vl)
	require.NoError(t, err)
	buf.Encrypt("user0", "bar")
	bytes, err = buf.Bytes()
	require.NoError(t, err)

	metrics, err = parser.Parse(bytes)
	require.NoError(t, err)
	assertEqualMetrics(t, singleMetric.expected, metrics)

	// Plain text data skipped
	buf, err = writeValueList(t.Context(), singleMetric.vl)
	require.NoError(t, err)
	bytes, err = buf.Bytes()
	require.NoError(t, err)

	metrics, err = parser.Parse(bytes)
	require.NoError(t, err)
	require.Empty(t, metrics)

	// Wrong password error
	buf, err = writeValueList(t.Context(), singleMetric.vl)
	require.NoError(t, err)
	buf.Sign("x", "y")
	bytes, err = buf.Bytes()
	require.NoError(t, err)

	_, err = parser.Parse(bytes)
	require.Error(t, err)
}

func TestParse_EncryptSecurityLevel(t *testing.T) {
	parser := &Parser{
		SecurityLevel: "encrypt",
		AuthFile:      "testdata/authfile",
	}
	require.NoError(t, parser.Init())

	// Signed data skipped
	buf, err := writeValueList(t.Context(), singleMetric.vl)
	require.NoError(t, err)
	buf.Sign("user0", "bar")
	bytes, err := buf.Bytes()
	require.NoError(t, err)

	metrics, err := parser.Parse(bytes)
	require.NoError(t, err)
	require.Empty(t, metrics)

	// Encrypted data
	buf, err = writeValueList(t.Context(), singleMetric.vl)
	require.NoError(t, err)
	buf.Encrypt("user0", "bar")
	bytes, err = buf.Bytes()
	require.NoError(t, err)

	metrics, err = parser.Parse(bytes)
	require.NoError(t, err)
	assertEqualMetrics(t, singleMetric.expected, metrics)

	// Plain text data skipped
	buf, err = writeValueList(t.Context(), singleMetric.vl)
	require.NoError(t, err)
	bytes, err = buf.Bytes()
	require.NoError(t, err)

	metrics, err = parser.Parse(bytes)
	require.NoError(t, err)
	require.Empty(t, metrics)

	// Wrong password error
	buf, err = writeValueList(t.Context(), singleMetric.vl)
	require.NoError(t, err)
	buf.Sign("x", "y")
	bytes, err = buf.Bytes()
	require.NoError(t, err)

	_, err = parser.Parse(bytes)
	require.Error(t, err)
}

func TestParseLine(t *testing.T) {
	buf, err := writeValueList(t.Context(), singleMetric.vl)
	require.NoError(t, err)
	bytes, err := buf.Bytes()
	require.NoError(t, err)

	parser := Parser{
		ParseMultiValue: "split",
	}
	require.NoError(t, parser.Init())
	m, err := parser.ParseLine(string(bytes))
	require.NoError(t, err)

	assertEqualMetrics(t, singleMetric.expected, []telegraf.Metric{m})
}

func writeValueList(testContext context.Context, valueLists []api.ValueList) (*network.Buffer, error) {
	buffer := network.NewBuffer(0)

	for i := range valueLists {
		err := buffer.Write(testContext, &valueLists[i])
		if err != nil {
			return nil, err
		}
	}

	return buffer, nil
}

func assertEqualMetrics(t *testing.T, expected []metricData, received []telegraf.Metric) {
	require.Len(t, received, len(expected))

	for i, m := range received {
		require.Equal(t, expected[i].name, m.Name())
		require.Equal(t, expected[i].tags, m.Tags())
		require.Equal(t, expected[i].fields, m.Fields())
	}
}

var benchmarkData = []api.ValueList{
	{
		Identifier: api.Identifier{
			Host:           "xyzzy",
			Plugin:         "cpu",
			PluginInstance: "1",
			Type:           "cpu",
			TypeInstance:   "user",
		},
		Values: []api.Value{
			api.Counter(4),
		},
		DSNames: []string(nil),
	},
	{
		Identifier: api.Identifier{
			Host:           "xyzzy",
			Plugin:         "cpu",
			PluginInstance: "2",
			Type:           "cpu",
			TypeInstance:   "user",
		},
		Values: []api.Value{
			api.Counter(5),
		},
		DSNames: []string(nil),
	},
}

func TestBenchmarkData(t *testing.T) {
	expected := []telegraf.Metric{
		metric.New(
			"cpu_value",
			map[string]string{
				"host":          "xyzzy",
				"instance":      "1",
				"type":          "cpu",
				"type_instance": "user",
			},
			map[string]interface{}{
				"value": 4.0,
			},
			time.Unix(0, 0),
		),
		metric.New(
			"cpu_value",
			map[string]string{
				"host":          "xyzzy",
				"instance":      "2",
				"type":          "cpu",
				"type_instance": "user",
			},
			map[string]interface{}{
				"value": 5.0,
			},
			time.Unix(0, 0),
		),
	}

	buf, err := writeValueList(t.Context(), benchmarkData)
	require.NoError(t, err)
	bytes, err := buf.Bytes()
	require.NoError(t, err)

	parser := &Parser{}
	require.NoError(t, parser.Init())
	actual, err := parser.Parse(bytes)
	require.NoError(t, err)

	testutil.RequireMetricsEqual(t, expected, actual, testutil.IgnoreTime(), testutil.SortMetrics())
}

func BenchmarkParsing(b *testing.B) {
	buf, err := writeValueList(b.Context(), benchmarkData)
	require.NoError(b, err)
	bytes, err := buf.Bytes()
	require.NoError(b, err)

	parser := &Parser{}
	require.NoError(b, parser.Init())

	b.ResetTimer()
	for n := 0; n < b.N; n++ {
		//nolint:errcheck // Benchmarking so skip the error check to avoid the unnecessary operations
		parser.Parse(bytes)
	}
}


@@ -0,0 +1 @@
user0: bar