
Adding upstream version 1.34.4.

Signed-off-by: Daniel Baumann <daniel@debian.org>
Daniel Baumann 2025-05-24 07:26:29 +02:00
parent e393c3af3f
commit 4978089aab
Signed by: daniel
GPG key ID: FBB4F0E80A80222F
4963 changed files with 677545 additions and 0 deletions


@@ -0,0 +1,65 @@
# Executable Daemon Output Plugin
This plugin writes metrics to an external daemon program via `stdin`. The
command will be executed once and metrics will be passed to it on every write
in one of the supported [data formats][data_formats].
The executable and the individual parameters must be defined as a list.
Anything the executable writes to `stderr` will be logged by Telegraf.
Telegraf minimum version: Telegraf 1.15.0
⭐ Telegraf v1.15.0
🏷️ system
💻 all
[data_formats]: /docs/DATA_FORMATS_OUTPUT.md
## Global configuration options <!-- @/docs/includes/plugin_config.md -->
In addition to the plugin-specific configuration settings, plugins support
additional global and plugin configuration settings. These settings are used to
modify metrics, tags, and fields, create aliases, and configure ordering.
See [CONFIGURATION.md][CONFIGURATION.md] for more details.
[CONFIGURATION.md]: ../../../docs/CONFIGURATION.md#plugins
## Configuration
```toml @sample.conf
# Run executable as long-running output plugin
[[outputs.execd]]
## The program to run as a daemon.
## NOTE: the program and each of its arguments must each be a separate string.
command = ["my-telegraf-output", "--some-flag", "value"]
## Environment variables
## Array of "key=value" pairs to pass as environment variables
## e.g. "KEY=value", "USERNAME=John Doe",
## "LD_LIBRARY_PATH=/opt/custom/lib64:/usr/local/libs"
# environment = []
## Delay before the process is restarted after an unexpected termination
restart_delay = "10s"
## Whether to skip metrics that cannot be serialized.
## When true, unserializable metrics are skipped and the remaining metrics are written.
## When false, encountering an unserializable metric causes the write to fail with an error.
## This setting has no effect when use_batch_format is enabled.
# ignore_serialization_error = false
## Use batch serialization instead of serializing metrics one at a time. Batch
## serialization enables batch-oriented output data formats and may encode and write metrics more efficiently.
# use_batch_format = false
## Data format to export.
## Each data format has its own unique set of configuration options, read
## more about them here:
## https://github.com/influxdata/telegraf/blob/master/docs/DATA_FORMATS_OUTPUT.md
data_format = "influx"
```
## Example
See the [examples][] directory.
[examples]: examples/
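
As a rough illustration of the contract described above, here is a minimal sketch of a stand-alone Go program (hypothetical, not shipped with this plugin) that could serve as the `command`: it reads whatever Telegraf writes to its `stdin` line by line and appends it to a file. The file name `metrics.out` is an arbitrary choice for this example.

```go
// A hypothetical external output program for use with [[outputs.execd]].
package main

import (
    "bufio"
    "fmt"
    "os"
)

func main() {
    // Telegraf starts this process once and writes serialized metrics
    // (for example influx line protocol) to its stdin on every flush.
    out, err := os.OpenFile("metrics.out", os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0o644)
    if err != nil {
        // Anything printed to stderr ends up in the Telegraf log.
        fmt.Fprintf(os.Stderr, "opening output file: %v\n", err)
        os.Exit(1)
    }
    defer out.Close()

    scanner := bufio.NewScanner(os.Stdin)
    for scanner.Scan() {
        // Each line is one serialized metric; simply persist it.
        fmt.Fprintln(out, scanner.Text())
    }
    if err := scanner.Err(); err != nil {
        fmt.Fprintf(os.Stderr, "reading stdin: %v\n", err)
        os.Exit(1)
    }
}
```

Compiled to a binary such as `my-telegraf-output` (the placeholder used in the sample configuration), anything it writes to `stderr` shows up in the Telegraf log, as described above.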


@@ -0,0 +1,5 @@
#!/bin/sh
# Usage: sh file.sh output_filename.ext
# Reads from stdin and appends each line to the file named on the command line.
while read -r line; do
  echo "$line" >> "$1"
done < /dev/stdin


@@ -0,0 +1,9 @@
[agent]
interval = "1s"
[[inputs.execd]]
command = ["ruby", "plugins/inputs/execd/examples/count.rb"]
[[outputs.execd]]
command = ["sh", "plugins/outputs/execd/examples/file/file.sh"]
data_format = "json"


@@ -0,0 +1,19 @@
#!/usr/bin/env ruby
#
# An example of funneling metrics to Redis pub/sub.
#
# to run this, you may need to:
# gem install redis
#
require 'redis'
r = Redis.new(host: "127.0.0.1", port: 6379, db: 1)
loop do
# example input: "counter_ruby count=0 1591741648101185000"
line = STDIN.readline.chomp
key = line.split(" ")[0]
key = key.split(",")[0]
r.publish(key, line)
end


@@ -0,0 +1,21 @@
#!/usr/bin/env ruby
#
# An example of funneling metrics to Redis pub/sub.
#
# to run this, you may need to:
# gem install redis
#
require 'redis'
require 'json'
r = Redis.new(host: "127.0.0.1", port: 6379, db: 1)
loop do
# example input: {"fields":{"count":0},"name":"counter_ruby","tags":{"host":"localhost"},"timestamp":1586374982}
line = STDIN.readline.chomp
l = JSON.parse(line)
key = l["name"]
r.publish(key, line)
end


@@ -0,0 +1,15 @@
[agent]
flush_interval = "1s"
interval = "1s"
[[inputs.execd]]
command = ["ruby", "plugins/inputs/execd/examples/count.rb"]
signal = "none"
[[outputs.execd]]
command = ["ruby", "plugins/outputs/execd/examples/redis/redis_influx.rb"]
data_format = "influx"
# [[outputs.file]]
# files = ["stdout"]
# data_format = "influx"


@@ -0,0 +1,134 @@
//go:generate ../../../tools/readme_config_includer/generator
package execd
import (
"bufio"
_ "embed"
"errors"
"fmt"
"io"
"strings"
"time"
"github.com/influxdata/telegraf"
"github.com/influxdata/telegraf/config"
"github.com/influxdata/telegraf/internal/process"
"github.com/influxdata/telegraf/plugins/outputs"
)
//go:embed sample.conf
var sampleConfig string
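// Execd runs the configured program as a long-lived child process and streams
// serialized metrics to its stdin. The exported fields mirror the options
// documented in sample.conf.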
type Execd struct {
Command []string `toml:"command"`
Environment []string `toml:"environment"`
RestartDelay config.Duration `toml:"restart_delay"`
IgnoreSerializationError bool `toml:"ignore_serialization_error"`
UseBatchFormat bool `toml:"use_batch_format"`
Log telegraf.Logger
process *process.Process
serializer telegraf.Serializer
}
func (*Execd) SampleConfig() string {
return sampleConfig
}
func (e *Execd) SetSerializer(s telegraf.Serializer) {
e.serializer = s
}
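// Init validates the configured command and prepares the managed child
// process, wiring its stdout and stderr to the plugin's log readers.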
func (e *Execd) Init() error {
if len(e.Command) == 0 {
return errors.New("no command specified")
}
var err error
e.process, err = process.New(e.Command, e.Environment)
if err != nil {
return fmt.Errorf("error creating process %s: %w", e.Command, err)
}
e.process.Log = e.Log
e.process.RestartDelay = time.Duration(e.RestartDelay)
e.process.ReadStdoutFn = e.cmdReadOut
e.process.ReadStderrFn = e.cmdReadErr
return nil
}
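// Connect starts the external process. If the command was configured as a
// single space-separated string, a warning points out the likely misconfiguration.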
func (e *Execd) Connect() error {
if err := e.process.Start(); err != nil {
// if there was only one argument, and it contained spaces, warn the user
// that they may have configured it wrong.
if len(e.Command) == 1 && strings.Contains(e.Command[0], " ") {
e.Log.Warn("The outputs.execd Command contained spaces but no arguments. " +
"This setting expects the program and arguments as an array of strings, " +
"not as a space-delimited string. See the plugin readme for an example.")
}
return fmt.Errorf("failed to start process %s: %w", e.Command, err)
}
return nil
}
func (e *Execd) Close() error {
e.process.Stop()
return nil
}
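// Write serializes the metrics and writes them to the child process' stdin.
// In batch mode the whole slice is serialized in one call; otherwise metrics
// are serialized one at a time and, if IgnoreSerializationError is set,
// metrics that fail to serialize are skipped.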
func (e *Execd) Write(metrics []telegraf.Metric) error {
if e.UseBatchFormat {
b, err := e.serializer.SerializeBatch(metrics)
if err != nil {
return fmt.Errorf("error serializing metrics: %w", err)
}
if _, err = e.process.Stdin.Write(b); err != nil {
return fmt.Errorf("error writing metrics: %w", err)
}
return nil
}
for _, m := range metrics {
b, err := e.serializer.Serialize(m)
if err != nil {
if !e.IgnoreSerializationError {
return fmt.Errorf("error serializing metrics: %w", err)
}
e.Log.Errorf("Skipping metric due to a serialization error: %v", err)
continue
}
if _, err = e.process.Stdin.Write(b); err != nil {
return fmt.Errorf("error writing metrics: %w", err)
}
}
return nil
}
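// cmdReadErr forwards every line the process writes to stderr to the Telegraf
// log at error level.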
func (e *Execd) cmdReadErr(out io.Reader) {
scanner := bufio.NewScanner(out)
for scanner.Scan() {
e.Log.Errorf("stderr: %s", scanner.Text())
}
if err := scanner.Err(); err != nil {
e.Log.Errorf("Error reading stderr: %s", err)
}
}
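// cmdReadOut forwards every line the process writes to stdout to the Telegraf
// log at info level.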
func (e *Execd) cmdReadOut(out io.Reader) {
scanner := bufio.NewScanner(out)
for scanner.Scan() {
e.Log.Info(scanner.Text())
}
}
func init() {
outputs.Add("execd", func() telegraf.Output {
return &Execd{}
})
}


@@ -0,0 +1,264 @@
package execd
import (
"bufio"
"errors"
"flag"
"fmt"
"io"
"os"
"strconv"
"strings"
"sync"
"testing"
"time"
"github.com/stretchr/testify/require"
"github.com/influxdata/telegraf"
"github.com/influxdata/telegraf/config"
"github.com/influxdata/telegraf/metric"
"github.com/influxdata/telegraf/plugins/parsers/influx"
serializers_influx "github.com/influxdata/telegraf/plugins/serializers/influx"
"github.com/influxdata/telegraf/testutil"
)
var now = time.Date(2020, 6, 30, 16, 16, 0, 0, time.UTC)
func TestExternalOutputWorks(t *testing.T) {
serializer := &serializers_influx.Serializer{}
require.NoError(t, serializer.Init())
exe, err := os.Executable()
require.NoError(t, err)
e := &Execd{
Command: []string{exe, "-testoutput"},
Environment: []string{"PLUGINS_OUTPUTS_EXECD_MODE=application", "METRIC_NAME=cpu", "METRIC_NUM=1"},
RestartDelay: config.Duration(5 * time.Second),
serializer: serializer,
Log: testutil.Logger{},
}
require.NoError(t, e.Init())
wg := &sync.WaitGroup{}
wg.Add(1)
e.process.ReadStderrFn = func(rstderr io.Reader) {
scanner := bufio.NewScanner(rstderr)
for scanner.Scan() {
t.Errorf("stderr: %q", scanner.Text())
}
if err := scanner.Err(); err != nil {
if !strings.HasSuffix(err.Error(), "already closed") {
t.Errorf("error reading stderr: %v", err)
}
}
wg.Done()
}
m := metric.New(
"cpu",
map[string]string{"name": "cpu1"},
map[string]interface{}{"idle": 50, "sys": 30},
now,
)
require.NoError(t, e.Connect())
require.NoError(t, e.Write([]telegraf.Metric{m}))
require.NoError(t, e.Close())
wg.Wait()
}
func TestBatchOutputWorks(t *testing.T) {
serializer := &serializers_influx.Serializer{}
require.NoError(t, serializer.Init())
exe, err := os.Executable()
require.NoError(t, err)
e := &Execd{
Command: []string{exe, "-testoutput"},
Environment: []string{"PLUGINS_OUTPUTS_EXECD_MODE=application", "METRIC_NAME=cpu", "METRIC_NUM=2"},
RestartDelay: config.Duration(5 * time.Second),
UseBatchFormat: true,
serializer: serializer,
Log: testutil.Logger{},
}
require.NoError(t, e.Init())
wg := &sync.WaitGroup{}
wg.Add(1)
e.process.ReadStderrFn = func(rstderr io.Reader) {
scanner := bufio.NewScanner(rstderr)
for scanner.Scan() {
t.Errorf("stderr: %q", scanner.Text())
}
if err := scanner.Err(); err != nil {
if !strings.HasSuffix(err.Error(), "already closed") {
t.Errorf("error reading stderr: %v", err)
}
}
wg.Done()
}
m := metric.New(
"cpu",
map[string]string{"name": "cpu1"},
map[string]interface{}{"idle": 50, "sys": 30},
now,
)
m2 := metric.New(
"cpu",
map[string]string{"name": "cpu1"},
map[string]interface{}{"idle": 50, "sys": 30},
now,
)
require.NoError(t, e.Connect())
require.NoError(t, e.Write([]telegraf.Metric{m, m2}))
require.NoError(t, e.Close())
wg.Wait()
}
func TestPartiallyUnserializableThrowError(t *testing.T) {
serializer := &serializers_influx.Serializer{}
require.NoError(t, serializer.Init())
exe, err := os.Executable()
require.NoError(t, err)
e := &Execd{
Command: []string{exe, "-testoutput"},
Environment: []string{"PLUGINS_OUTPUTS_EXECD_MODE=application", "METRIC_NAME=cpu"},
RestartDelay: config.Duration(5 * time.Second),
IgnoreSerializationError: false,
serializer: serializer,
Log: testutil.Logger{},
}
require.NoError(t, e.Init())
m1 := metric.New(
"cpu",
map[string]string{"name": "cpu1"},
map[string]interface{}{"idle": 50, "sys": 30},
now,
)
m2 := metric.New(
"cpu",
map[string]string{"name": "cpu2"},
map[string]interface{}{},
now,
)
require.NoError(t, e.Connect())
require.Error(t, e.Write([]telegraf.Metric{m1, m2}))
require.NoError(t, e.Close())
}
func TestPartiallyUnserializableCanBeSkipped(t *testing.T) {
serializer := &serializers_influx.Serializer{}
require.NoError(t, serializer.Init())
exe, err := os.Executable()
require.NoError(t, err)
e := &Execd{
Command: []string{exe, "-testoutput"},
Environment: []string{"PLUGINS_OUTPUTS_EXECD_MODE=application", "METRIC_NAME=cpu"},
RestartDelay: config.Duration(5 * time.Second),
IgnoreSerializationError: true,
serializer: serializer,
Log: testutil.Logger{},
}
require.NoError(t, e.Init())
m1 := metric.New(
"cpu",
map[string]string{"name": "cpu1"},
map[string]interface{}{"idle": 50, "sys": 30},
now,
)
m2 := metric.New(
"cpu",
map[string]string{"name": "cpu2"},
map[string]interface{}{},
now,
)
require.NoError(t, e.Connect())
require.NoError(t, e.Write([]telegraf.Metric{m1, m2}))
require.NoError(t, e.Close())
}
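// The test binary doubles as the external output program: when started with
// -testoutput and PLUGINS_OUTPUTS_EXECD_MODE=application, TestMain hands
// control to runOutputConsumerProgram instead of running the test suite.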
var testoutput = flag.Bool("testoutput", false,
"if true, act as the external output consumer program instead of running tests")
func TestMain(m *testing.M) {
flag.Parse()
runMode := os.Getenv("PLUGINS_OUTPUTS_EXECD_MODE")
if *testoutput && runMode == "application" {
runOutputConsumerProgram()
os.Exit(0)
}
code := m.Run()
os.Exit(code)
}
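// runOutputConsumerProgram plays the role of the external output: it parses
// influx line protocol from stdin, checks every metric against the expected
// one, and exits non-zero on any parse error or mismatch.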
func runOutputConsumerProgram() {
metricName := os.Getenv("METRIC_NAME")
expectedMetrics, err := strconv.Atoi(os.Getenv("METRIC_NUM"))
if err != nil {
fmt.Fprintf(os.Stderr, "could not parse METRIC_NUM\n")
//nolint:revive // error code is important for this "test"
os.Exit(1)
}
parser := influx.NewStreamParser(os.Stdin)
numMetrics := 0
for {
m, err := parser.Next()
if err != nil {
if errors.Is(err, influx.EOF) {
break // stream ended
}
var parseErr *influx.ParseError
if errors.As(err, &parseErr) {
fmt.Fprintf(os.Stderr, "parse ERR %v\n", parseErr)
//nolint:revive // error code is important for this "test"
os.Exit(1)
}
fmt.Fprintf(os.Stderr, "ERR %v\n", err)
//nolint:revive // error code is important for this "test"
os.Exit(1)
}
numMetrics++
expected := testutil.MustMetric(metricName,
map[string]string{"name": "cpu1"},
map[string]interface{}{"idle": 50, "sys": 30},
now,
)
if !testutil.MetricEqual(expected, m) {
fmt.Fprintf(os.Stderr, "metric doesn't match expected\n")
//nolint:revive // error code is important for this "test"
os.Exit(1)
}
}
if expectedMetrics != numMetrics {
fmt.Fprintf(os.Stderr, "number of metrics doesn't match expected: %v, %v\n", numMetrics, expectedMetrics)
//nolint:revive // error code is important for this "test"
os.Exit(1)
}
}


@@ -0,0 +1,30 @@
# Run executable as long-running output plugin
[[outputs.execd]]
## The program to run as a daemon.
## NOTE: the program and each of its arguments must each be a separate string.
command = ["my-telegraf-output", "--some-flag", "value"]
## Environment variables
## Array of "key=value" pairs to pass as environment variables
## e.g. "KEY=value", "USERNAME=John Doe",
## "LD_LIBRARY_PATH=/opt/custom/lib64:/usr/local/libs"
# environment = []
## Delay before the process is restarted after an unexpected termination
restart_delay = "10s"
## Whether to skip metrics that cannot be serialized.
## When true, unserializable metrics are skipped and the remaining metrics are written.
## When false, encountering an unserializable metric causes the write to fail with an error.
## This setting has no effect when use_batch_format is enabled.
# ignore_serialization_error = false
## Use batch serialization instead of serializing metrics one at a time. Batch
## serialization enables batch-oriented output data formats and may encode and write metrics more efficiently.
# use_batch_format = false
## Data format to export.
## Each data format has its own unique set of configuration options, read
## more about them here:
## https://github.com/influxdata/telegraf/blob/master/docs/DATA_FORMATS_OUTPUT.md
data_format = "influx"