1
0
Fork 0

Adding upstream version 1.34.4.

Signed-off-by: Daniel Baumann <daniel@debian.org>
This commit is contained in:
Daniel Baumann 2025-05-24 07:26:29 +02:00
parent e393c3af3f
commit 4978089aab
Signed by: daniel
GPG key ID: FBB4F0E80A80222F
4963 changed files with 677545 additions and 0 deletions

1
config/README.md Symbolic link
View file

@ -0,0 +1 @@
../docs/CONFIGURATION.md

1924
config/config.go Normal file

File diff suppressed because it is too large Load diff

1540
config/config_test.go Normal file

File diff suppressed because it is too large Load diff

393
config/deprecation.go Normal file
View file

@ -0,0 +1,393 @@
package config
import (
"errors"
"fmt"
"log"
"reflect"
"sort"
"strings"
"github.com/coreos/go-semver/semver"
"github.com/fatih/color"
"github.com/influxdata/telegraf"
"github.com/influxdata/telegraf/plugins/aggregators"
"github.com/influxdata/telegraf/plugins/inputs"
"github.com/influxdata/telegraf/plugins/outputs"
"github.com/influxdata/telegraf/plugins/processors"
)
// DeprecationInfo contains all important information to describe a deprecated entity
type DeprecationInfo struct {
	// Name of the plugin or plugin option
	Name string
	// logLevel is the level of deprecation which currently corresponds to a log-level
	logLevel telegraf.LogLevel
	// info holds the raw deprecation metadata (since/removal versions and notice text)
	info telegraf.DeprecationInfo
}
// determineEscalation computes the deprecation log-level by comparing the
// running telegraf version against the 'since' and 'removal' versions of the
// deprecation info. If no removal version is given, the next major release
// after 'since' is assumed and stored back into the info.
func (di *DeprecationInfo) determineEscalation() error {
	di.logLevel = telegraf.None
	if di.info.Since == "" {
		return nil
	}

	since, err := semver.NewVersion(di.info.Since)
	if err != nil {
		return fmt.Errorf("cannot parse 'since' version %q: %w", di.info.Since, err)
	}

	var removal *semver.Version
	if di.info.RemovalIn == "" {
		// No removal version given, so assume the next major release
		removal = &semver.Version{Major: since.Major}
		removal.BumpMajor()
		di.info.RemovalIn = removal.String()
	} else {
		if removal, err = semver.NewVersion(di.info.RemovalIn); err != nil {
			return fmt.Errorf("cannot parse 'removal' version %q: %w", di.info.RemovalIn, err)
		}
	}

	// Drop potential pre-release tags
	current := semver.Version{
		Major: telegrafVersion.Major,
		Minor: telegrafVersion.Minor,
		Patch: telegrafVersion.Patch,
	}
	switch {
	case !current.LessThan(*removal):
		di.logLevel = telegraf.Error
	case !current.LessThan(*since):
		di.logLevel = telegraf.Warn
	}
	return nil
}
// PluginDeprecationInfo holds all information about a deprecated plugin or its options
type PluginDeprecationInfo struct {
	DeprecationInfo
	// Options deprecated for this plugin
	Options []DeprecationInfo
}
// incrementPluginDeprecations bumps the deprecated-plugin counter (first
// element of the pair) for the given plugin category, creating the counter
// pair if it does not exist yet.
func (c *Config) incrementPluginDeprecations(category string) {
	previous, found := c.Deprecations[category]
	if !found {
		previous = []int64{0, 0}
	}
	c.Deprecations[category] = []int64{previous[0] + 1, previous[1]}
}
// incrementPluginOptionDeprecations bumps the deprecated-option counter
// (second element of the pair) for the given plugin category, creating the
// counter pair if it does not exist yet.
func (c *Config) incrementPluginOptionDeprecations(category string) {
	previous, found := c.Deprecations[category]
	if !found {
		previous = []int64{0, 0}
	}
	c.Deprecations[category] = []int64{previous[0], previous[1] + 1}
}
// collectDeprecationInfo gathers the deprecation state of the given plugin
// instance including its deprecated options. With all=false only options that
// are actually set (non-zero value) are reported; with all=true every
// deprecated option is reported regardless of its value. Passing a nil plugin
// checks the plugin name only. As a side effect, the plugin-level and
// option-level deprecation counters on the config are incremented.
func (c *Config) collectDeprecationInfo(category, name string, plugin interface{}, all bool) PluginDeprecationInfo {
	info := PluginDeprecationInfo{
		DeprecationInfo: DeprecationInfo{
			Name:     category + "." + name,
			logLevel: telegraf.None,
		},
	}

	// First check if the whole plugin is deprecated
	switch category {
	case "aggregators":
		if pi, deprecated := aggregators.Deprecations[name]; deprecated {
			info.DeprecationInfo.info = pi
		}
	case "inputs":
		if pi, deprecated := inputs.Deprecations[name]; deprecated {
			info.DeprecationInfo.info = pi
		}
	case "outputs":
		if pi, deprecated := outputs.Deprecations[name]; deprecated {
			info.DeprecationInfo.info = pi
		}
	case "processors":
		if pi, deprecated := processors.Deprecations[name]; deprecated {
			info.DeprecationInfo.info = pi
		}
	}
	if err := info.determineEscalation(); err != nil {
		// A non-parsable version in the registered deprecation info is a
		// programming error in the plugin registration, hence panic
		panic(fmt.Errorf("plugin %q: %w", info.Name, err))
	}
	if info.logLevel != telegraf.None {
		c.incrementPluginDeprecations(category)
	}

	// Allow checking for names only.
	if plugin == nil {
		return info
	}

	// Check for deprecated options
	walkPluginStruct(reflect.ValueOf(plugin), func(field reflect.StructField, value reflect.Value) {
		// Try to report only those fields that are set
		if !all && value.IsZero() {
			return
		}

		// The 'deprecated' struct tag has up to three semicolon-separated
		// elements: since-version, optional removal-version, and notice
		tags := strings.SplitN(field.Tag.Get("deprecated"), ";", 3)
		if len(tags) < 1 || tags[0] == "" {
			return
		}
		optionInfo := DeprecationInfo{Name: field.Name}
		optionInfo.info.Since = tags[0]

		// The last element is always the notice; the middle element (only
		// present with three elements) is the removal version
		if len(tags) > 1 {
			optionInfo.info.Notice = tags[len(tags)-1]
		}
		if len(tags) > 2 {
			optionInfo.info.RemovalIn = tags[1]
		}
		if err := optionInfo.determineEscalation(); err != nil {
			panic(fmt.Errorf("plugin %q option %q: %w", info.Name, field.Name, err))
		}

		if optionInfo.logLevel != telegraf.None {
			c.incrementPluginOptionDeprecations(category)
		}

		// Get the toml field name, falling back to the Go field name
		option := field.Tag.Get("toml")
		if option != "" {
			optionInfo.Name = option
		}
		info.Options = append(info.Options, optionInfo)
	})

	return info
}
// printUserDeprecation logs deprecation notices for the given plugin instance
// and its set options. It returns an error if the plugin itself or any used
// option already reached the error (removal) stage.
func (c *Config) printUserDeprecation(category, name string, plugin interface{}) error {
	info := c.collectDeprecationInfo(category, name, plugin, false)

	// Handle deprecation of the plugin as a whole
	printPluginDeprecationNotice(info.logLevel, info.Name, info.info)
	if info.logLevel == telegraf.Error {
		return errors.New("plugin deprecated")
	}

	// Print the deprecated options and collect those that already reached
	// the error stage
	var removed []string
	for _, opt := range info.Options {
		PrintOptionDeprecationNotice(info.Name, opt.Name, opt.info)
		if opt.logLevel == telegraf.Error {
			removed = append(removed, opt.Name)
		}
	}
	if len(removed) == 0 {
		return nil
	}
	return fmt.Errorf("plugin options %q deprecated", strings.Join(removed, ","))
}
// CollectDeprecationInfos collects the deprecation information for all plugins
// matching the given per-category filters, including the deprecation state of
// all plugin options. An empty filter list selects every plugin of the
// corresponding category. The result is keyed by plugin category.
func (c *Config) CollectDeprecationInfos(inFilter, outFilter, aggFilter, procFilter []string) map[string][]PluginDeprecationInfo {
	infos := make(map[string][]PluginDeprecationInfo)

	// collect determines the deprecation state of a single plugin instance
	// and records it if the plugin or any of its options is deprecated. This
	// removes the fourfold duplication of the per-category loop bodies below.
	collect := func(category, name string, plugin interface{}) {
		info := c.collectDeprecationInfo(category, name, plugin, true)
		if info.logLevel != telegraf.None || len(info.Options) > 0 {
			infos[category] = append(infos[category], info)
		}
	}

	infos["inputs"] = make([]PluginDeprecationInfo, 0)
	for name, creator := range inputs.Inputs {
		if len(inFilter) > 0 && !sliceContains(name, inFilter) {
			continue
		}
		collect("inputs", name, creator())
	}

	infos["outputs"] = make([]PluginDeprecationInfo, 0)
	for name, creator := range outputs.Outputs {
		if len(outFilter) > 0 && !sliceContains(name, outFilter) {
			continue
		}
		collect("outputs", name, creator())
	}

	infos["processors"] = make([]PluginDeprecationInfo, 0)
	for name, creator := range processors.Processors {
		if len(procFilter) > 0 && !sliceContains(name, procFilter) {
			continue
		}
		collect("processors", name, creator())
	}

	infos["aggregators"] = make([]PluginDeprecationInfo, 0)
	for name, creator := range aggregators.Aggregators {
		if len(aggFilter) > 0 && !sliceContains(name, aggFilter) {
			continue
		}
		collect("aggregators", name, creator())
	}

	return infos
}
// PrintDeprecationList prints the given plugin deprecation information sorted
// by plugin name, including all deprecated options of each plugin.
func (*Config) PrintDeprecationList(plugins []PluginDeprecationInfo) {
	sort.Slice(plugins, func(i, j int) bool { return plugins[i].Name < plugins[j].Name })

	for _, p := range plugins {
		// Only plugins that reached the warn or error stage are listed
		if p.logLevel == telegraf.Warn || p.logLevel == telegraf.Error {
			fmt.Printf(
				" %-40s %-5s since %-5s removal in %-5s %s\n",
				p.Name, p.logLevel, p.info.Since, p.info.RemovalIn, p.info.Notice,
			)
		}

		if len(p.Options) < 1 {
			continue
		}

		// List the deprecated options of the plugin sorted by name
		sort.Slice(p.Options, func(i, j int) bool { return p.Options[i].Name < p.Options[j].Name })
		for _, o := range p.Options {
			fmt.Printf(
				" %-40s %-5s since %-5s removal in %-5s %s\n",
				p.Name+"/"+o.Name, o.logLevel, o.info.Since, o.info.RemovalIn, o.info.Notice,
			)
		}
	}
}
// printHistoricPluginDeprecationNotice logs the error-level notice for a
// plugin that was already removed from telegraf.
func printHistoricPluginDeprecationNotice(category, name string, info telegraf.DeprecationInfo) {
	plugin := category + "." + name
	log.Printf(
		"%s: Plugin %q deprecated since version %s and removed: %s",
		"E! "+color.RedString("DeprecationError"), plugin, info.Since, info.Notice,
	)
}
// walkPluginStruct iterates over the fields of a structure in depth-first search (to cover nested structures)
// and calls the given function for every visited field.
func walkPluginStruct(value reflect.Value, fn func(f reflect.StructField, fv reflect.Value)) {
v := reflect.Indirect(value)
t := v.Type()
// Only works on structs
if t.Kind() != reflect.Struct {
return
}
// Walk over the struct fields and call the given function. If we encounter more complex embedded
// elements (structs, slices/arrays, maps) we need to descend into those elements as they might
// contain structures nested in the current structure.
for i := 0; i < t.NumField(); i++ {
field := t.Field(i)
fieldValue := v.Field(i)
if field.PkgPath != "" {
continue
}
switch field.Type.Kind() {
case reflect.Struct:
walkPluginStruct(fieldValue, fn)
case reflect.Array, reflect.Slice:
for j := 0; j < fieldValue.Len(); j++ {
element := fieldValue.Index(j)
// The array might contain structs
walkPluginStruct(element, fn)
fn(field, element)
}
case reflect.Map:
iter := fieldValue.MapRange()
for iter.Next() {
element := iter.Value()
// The map might contain structs
walkPluginStruct(element, fn)
fn(field, element)
}
}
fn(field, fieldValue)
}
}
// deprecationPrefix returns the colored log prefix matching the given
// deprecation level or an empty string for all other levels.
func deprecationPrefix(level telegraf.LogLevel) string {
	if level == telegraf.Warn {
		return "W! " + color.YellowString("DeprecationWarning")
	}
	if level == telegraf.Error {
		return "E! " + color.RedString("DeprecationError")
	}
	return ""
}
// printPluginDeprecationNotice logs a deprecation message for the given
// plugin; levels other than warn and error are silent.
func printPluginDeprecationNotice(level telegraf.LogLevel, name string, info telegraf.DeprecationInfo) {
	if level != telegraf.Warn && level != telegraf.Error {
		return
	}
	log.Printf(
		"%s: Plugin %q deprecated since version %s and will be removed in %s: %s",
		deprecationPrefix(level), name, info.Since, info.RemovalIn, info.Notice,
	)
}
// PrintOptionDeprecationNotice logs a deprecation message for the given plugin
// option. The escalation level is derived from the running telegraf version
// and the versions contained in the info; levels below warn are silent.
func PrintOptionDeprecationNotice(plugin, option string, info telegraf.DeprecationInfo) {
	// Determine the log-level
	di := &DeprecationInfo{Name: plugin, info: info}
	if err := di.determineEscalation(); err != nil {
		log.Printf("E! Determining log-level for option %s in plugin %s failed: %v", option, plugin, err)
		return
	}

	if di.logLevel != telegraf.Warn && di.logLevel != telegraf.Error {
		return
	}
	log.Printf(
		"%s: Option %q of plugin %q deprecated since version %s and will be removed in %s: %s",
		deprecationPrefix(di.logLevel), option, plugin, di.info.Since, di.info.RemovalIn, di.info.Notice,
	)
}
// PrintOptionValueDeprecationNotice logs a deprecation message for a specific
// value of the given plugin option. The escalation level is derived from the
// running telegraf version; levels below warn are silent.
func PrintOptionValueDeprecationNotice(plugin, option string, value interface{}, info telegraf.DeprecationInfo) {
	// Determine the log-level
	di := &DeprecationInfo{Name: plugin, info: info}
	if err := di.determineEscalation(); err != nil {
		log.Printf("E! Determining log-level for option %s in plugin %s failed: %v", option, plugin, err)
		return
	}

	if di.logLevel != telegraf.Warn && di.logLevel != telegraf.Error {
		return
	}
	log.Printf(
		`%s: Value "%+v" for option %q of plugin %q deprecated since version %s and will be removed in %s: %s`,
		deprecationPrefix(di.logLevel), value, option, plugin, di.info.Since, di.info.RemovalIn, di.info.Notice,
	)
}

277
config/deprecation_test.go Normal file
View file

@ -0,0 +1,277 @@
package config
import (
"bufio"
"bytes"
"log"
"strings"
"testing"
"time"
"github.com/coreos/go-semver/semver"
"github.com/stretchr/testify/require"
"github.com/influxdata/telegraf"
)
// TestPluginDeprecation checks that printPluginDeprecationNotice logs a
// message only for warn and error levels and that the message carries the
// correct prefix, plugin name and deprecation details.
func TestPluginDeprecation(t *testing.T) {
	info := telegraf.DeprecationInfo{
		Since:     "1.23.0",
		RemovalIn: "2.0.0",
		Notice:    "please check",
	}
	var tests = []struct {
		name     string
		level    telegraf.LogLevel
		expected string
	}{
		{
			name:     "Error level",
			level:    telegraf.Error,
			expected: `Plugin "test" deprecated since version 1.23.0 and will be removed in 2.0.0: please check`,
		},
		{
			name:     "Warn level",
			level:    telegraf.Warn,
			expected: `Plugin "test" deprecated since version 1.23.0 and will be removed in 2.0.0: please check`,
		},
		{
			name:     "None",
			level:    telegraf.None,
			expected: ``,
		},
	}

	// Switch the logger to log to a buffer
	var buf bytes.Buffer
	scanner := bufio.NewScanner(&buf)
	previous := log.Writer()
	log.SetOutput(&buf)
	defer log.SetOutput(previous)

	msg := make(chan string, 1)
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			buf.Reset()
			printPluginDeprecationNotice(tt.level, "test", info)
			// Wait for a newline to arrive and timeout for cases where
			// we don't see a message.
			go func() {
				scanner.Scan()
				msg <- scanner.Text()
			}()
			// Reduce the timeout if we do not expect a message
			timeout := 1 * time.Second
			if tt.expected == "" {
				timeout = 100 * time.Microsecond
			}
			var actual string
			select {
			case actual = <-msg:
			case <-time.After(timeout):
			}
			if tt.expected != "" {
				// NOTE(review): strict equality assumes the logger emits no
				// timestamp prefix here — verify the log flags configuration
				expected := deprecationPrefix(tt.level) + ": " + tt.expected
				require.Equal(t, expected, actual)
			} else {
				require.Empty(t, actual)
			}
		})
	}
}
// TestPluginOptionDeprecation checks the notices printed for deprecated
// plugin options at the different escalation stages, including the implicit
// removal version (next major release) when no removal version is given.
func TestPluginOptionDeprecation(t *testing.T) {
	var tests = []struct {
		name          string
		since         string
		removal       string
		expected      string
		expectedLevel telegraf.LogLevel
	}{
		{
			name:          "Error level",
			since:         "1.23.0",
			removal:       "1.29.0",
			expectedLevel: telegraf.Error,
			expected:      `Option "option" of plugin "test" deprecated since version 1.23.0 and will be removed in 1.29.0: please check`,
		},
		{
			name:          "Warn level",
			since:         "1.23.0",
			removal:       "2.0.0",
			expectedLevel: telegraf.Warn,
			expected:      `Option "option" of plugin "test" deprecated since version 1.23.0 and will be removed in 2.0.0: please check`,
		},
		{
			// Without an explicit removal version the next major release
			// (here 2.0.0) is assumed
			name:          "No removal info",
			since:         "1.23.0",
			expectedLevel: telegraf.Warn,
			expected:      `Option "option" of plugin "test" deprecated since version 1.23.0 and will be removed in 2.0.0: please check`,
		},
		{
			name:          "None",
			expectedLevel: telegraf.None,
			expected:      ``,
		},
	}

	// Fake telegraf's version
	// NOTE(review): mutates the package-global telegrafVersion without
	// restoring it — may leak into other tests in this package
	version, err := semver.NewVersion("1.30.0")
	require.NoError(t, err)
	telegrafVersion = version

	// Switch the logger to log to a buffer
	var buf bytes.Buffer
	scanner := bufio.NewScanner(&buf)
	previous := log.Writer()
	log.SetOutput(&buf)
	defer log.SetOutput(previous)

	msg := make(chan string, 1)
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			buf.Reset()
			info := telegraf.DeprecationInfo{
				Since:     tt.since,
				RemovalIn: tt.removal,
				Notice:    "please check",
			}
			PrintOptionDeprecationNotice("test", "option", info)
			// Wait for a newline to arrive and timeout for cases where
			// we don't see a message.
			go func() {
				scanner.Scan()
				msg <- scanner.Text()
			}()
			// Reduce the timeout if we do not expect a message
			timeout := 1 * time.Second
			if tt.expected == "" {
				timeout = 100 * time.Microsecond
			}
			var actual string
			select {
			case actual = <-msg:
			case <-time.After(timeout):
			}
			if tt.expected != "" {
				expected := deprecationPrefix(tt.expectedLevel) + ": " + tt.expected
				require.Equal(t, expected, actual)
			} else {
				require.Empty(t, actual)
			}
		})
	}
}
// TestPluginOptionValueDeprecation checks the notices printed for deprecated
// option values of various types (string, nil, bool, int) at the different
// escalation stages.
func TestPluginOptionValueDeprecation(t *testing.T) {
	var tests = []struct {
		name          string
		since         string
		removal       string
		value         interface{}
		expected      string
		expectedLevel telegraf.LogLevel
	}{
		{
			name:          "Error level",
			since:         "1.25.0",
			removal:       "1.29.0",
			value:         "foobar",
			expected:      `Value "foobar" for option "option" of plugin "test" deprecated since version 1.25.0 and will be removed in 1.29.0: please check`,
			expectedLevel: telegraf.Error,
		},
		{
			name:          "Warn level",
			since:         "1.25.0",
			removal:       "2.0.0",
			value:         "foobar",
			expected:      `Value "foobar" for option "option" of plugin "test" deprecated since version 1.25.0 and will be removed in 2.0.0: please check`,
			expectedLevel: telegraf.Warn,
		},
		{
			// Without an explicit removal version the next major release is assumed
			name:          "No removal info",
			since:         "1.25.0",
			value:         "foobar",
			expected:      `Value "foobar" for option "option" of plugin "test" deprecated since version 1.25.0 and will be removed in 2.0.0: please check`,
			expectedLevel: telegraf.Warn,
		},
		{
			name:          "None",
			expected:      ``,
			expectedLevel: telegraf.None,
		},
		{
			name:          "nil value",
			since:         "1.25.0",
			removal:       "1.29.0",
			value:         nil,
			expected:      `Value "<nil>" for option "option" of plugin "test" deprecated since version 1.25.0 and will be removed in 1.29.0: please check`,
			expectedLevel: telegraf.Error,
		},
		{
			name:          "Boolean value",
			since:         "1.25.0",
			removal:       "1.29.0",
			value:         true,
			expected:      `Value "true" for option "option" of plugin "test" deprecated since version 1.25.0 and will be removed in 1.29.0: please check`,
			expectedLevel: telegraf.Error,
		},
		{
			name:          "Integer value",
			since:         "1.25.0",
			removal:       "1.29.0",
			value:         123,
			expected:      `Value "123" for option "option" of plugin "test" deprecated since version 1.25.0 and will be removed in 1.29.0: please check`,
			expectedLevel: telegraf.Error,
		},
	}

	// Fake telegraf's version
	// NOTE(review): mutates the package-global telegrafVersion without
	// restoring it — may leak into other tests in this package
	version, err := semver.NewVersion("1.30.0")
	require.NoError(t, err)
	telegrafVersion = version

	// Switch the logger to log to a buffer
	var buf bytes.Buffer
	previous := log.Writer()
	log.SetOutput(&buf)
	defer log.SetOutput(previous)

	timeout := 1 * time.Second
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			buf.Reset()
			info := telegraf.DeprecationInfo{
				Since:     tt.since,
				RemovalIn: tt.removal,
				Notice:    "please check",
			}
			PrintOptionValueDeprecationNotice("test", "option", tt.value, info)
			if tt.expected != "" {
				// Wait until a complete line arrived in the log buffer
				require.Eventually(t, func() bool {
					return strings.HasSuffix(buf.String(), "\n")
				}, timeout, 100*time.Millisecond)
				// Remove the time for comparison
				actual := strings.TrimSpace(buf.String())
				expected := deprecationPrefix(tt.expectedLevel) + ": " + tt.expected
				require.Equal(t, expected, actual)
			} else {
				time.Sleep(timeout)
				require.Empty(t, buf.String())
			}
		})
	}
}

252
config/envvar.go Normal file
View file

@ -0,0 +1,252 @@
package config
import (
"bytes"
"errors"
"io"
"os"
"strings"
"github.com/compose-spec/compose-go/template"
"github.com/compose-spec/compose-go/utils"
)
// trimmer removes TOML comments from its input while preserving the content
// of quoted strings, which may legitimately contain '#' characters.
type trimmer struct {
	// input holds the raw configuration data to process
	input *bytes.Reader
	// output receives the configuration with comments stripped
	output bytes.Buffer
}
// removeComments strips TOML comments from buf while keeping the content of
// quoted strings intact and returns the cleaned configuration.
func removeComments(buf []byte) ([]byte, error) {
	t := trimmer{input: bytes.NewReader(buf)}
	err := t.process()
	return t.output.Bytes(), err
}
// process reads the whole input byte-by-byte and dispatches to the handlers
// for escape sequences, quoted strings and comments. Everything except
// comment text is copied to the output verbatim; EOF terminates cleanly.
func (t *trimmer) process() error {
	for {
		// Read the next byte until EOF
		c, err := t.input.ReadByte()
		if err != nil {
			if errors.Is(err, io.EOF) {
				break
			}
			return err
		}

		// Switch states if we need to
		switch c {
		case '\\':
			//nolint:errcheck // next byte is known
			t.input.UnreadByte()
			err = t.escape()
		case '\'':
			//nolint:errcheck // next byte is known
			t.input.UnreadByte()
			// Distinguish multi-line literals (''') from plain literals (')
			if t.hasNQuotes(c, 3) {
				err = t.tripleSingleQuote()
			} else {
				err = t.singleQuote()
			}
		case '"':
			//nolint:errcheck // next byte is known
			t.input.UnreadByte()
			// Distinguish multi-line strings (""") from basic strings (")
			if t.hasNQuotes(c, 3) {
				err = t.tripleDoubleQuote()
			} else {
				err = t.doubleQuote()
			}
		case '#':
			err = t.comment()
		default:
			t.output.WriteByte(c)
			continue
		}
		if err != nil {
			if errors.Is(err, io.EOF) {
				break
			}
			return err
		}
	}
	return nil
}
// hasNQuotes performs a look-ahead checking whether the next limit bytes of
// the input all equal ref. The read position is restored before returning,
// i.e. all consumed bytes are unread again.
func (t *trimmer) hasNQuotes(ref byte, limit int64) bool {
	var count int64
	var mismatch bool
	// Look ahead check if the next characters are what we expect
	for count = 0; count < limit; count++ {
		c, err := t.input.ReadByte()
		if err != nil {
			// A failed read (e.g. EOF) consumes nothing, so only the already
			// matched bytes need to be unread below
			break
		}
		if c != ref {
			mismatch = true
			break
		}
	}

	// Unread the matched characters and, only if one was actually read, the
	// first non-matching character. The previous code also rewound the extra
	// byte after a read error, where nothing had been consumed, leaving the
	// read position one byte before the start of the scan.
	offset := -count
	if mismatch {
		offset--
	}
	//nolint:errcheck // Unread the already matched characters
	t.input.Seek(offset, io.SeekCurrent)
	return count >= limit
}
// readWriteByte transfers a single byte from the input to the output and
// returns the byte that was copied.
func (t *trimmer) readWriteByte() (byte, error) {
	c, err := t.input.ReadByte()
	if err != nil {
		return 0, err
	}
	err = t.output.WriteByte(c)
	return c, err
}
// escape copies an escape sequence (the backslash and the escaped character)
// verbatim to the output.
func (t *trimmer) escape() error {
	// Consume the known starting backslash; an error here will surface again
	// on the next read
	if _, err := t.readWriteByte(); err != nil {
		return err
	}

	// Copy the escaped character and pass on any read error
	_, err := t.readWriteByte()
	return err
}
// singleQuote copies a literal single-quoted TOML string to the output,
// terminated by the closing quote, a line break or EOF.
func (t *trimmer) singleQuote() error {
	//nolint:errcheck // Consume the known starting quote
	t.readWriteByte()

	// Copy everything up to and including the terminator
	for {
		c, err := t.readWriteByte()
		if err != nil {
			return err
		}
		if c == '\'' || c == '\n' {
			return nil
		}
	}
}
// tripleSingleQuote copies a multi-line literal string ('''...''') verbatim
// to the output, including the surrounding quotes.
func (t *trimmer) tripleSingleQuote() error {
	// Copy the three known opening quotes
	for i := 0; i < 3; i++ {
		//nolint:errcheck // Consume the known starting quotes
		t.readWriteByte()
	}

	// Copy bytes until the closing triple quote or EOF
	for {
		c, err := t.readWriteByte()
		if err != nil {
			return err
		}
		if c != '\'' || !t.hasNQuotes('\'', 2) {
			continue
		}
		//nolint:errcheck // Consume the two additional ending quotes
		t.readWriteByte()
		//nolint:errcheck // Consume the two additional ending quotes
		t.readWriteByte()
		return nil
	}
}
// doubleQuote copies a basic double-quoted TOML string to the output, keeping
// escape sequences intact and terminating on the closing quote, a line break
// or EOF.
func (t *trimmer) doubleQuote() error {
	//nolint:errcheck // Consume the known starting quote
	t.readWriteByte()

	for {
		c, err := t.input.ReadByte()
		if err != nil {
			return err
		}
		if c == '\\' {
			//nolint:errcheck // Consume the found escaped character
			t.input.UnreadByte()
			if err := t.escape(); err != nil {
				return err
			}
			continue
		}
		if c == '"' || c == '\n' {
			// Found terminator
			return t.output.WriteByte(c)
		}
		t.output.WriteByte(c)
	}
}
// tripleDoubleQuote copies a multi-line basic string ("""...""") to the
// output, keeping escape sequences intact, until the closing triple quote or
// EOF.
func (t *trimmer) tripleDoubleQuote() error {
	// Copy the three known opening quotes
	for i := 0; i < 3; i++ {
		//nolint:errcheck // Consume the known starting quotes
		t.readWriteByte()
	}

	for {
		c, err := t.input.ReadByte()
		if err != nil {
			return err
		}
		if c == '\\' {
			//nolint:errcheck // Consume the found escape character
			t.input.UnreadByte()
			if err := t.escape(); err != nil {
				return err
			}
			continue
		}
		t.output.WriteByte(c)
		if c == '"' && t.hasNQuotes('"', 2) {
			//nolint:errcheck // Consume the two additional ending quotes
			t.readWriteByte()
			//nolint:errcheck // Consume the two additional ending quotes
			t.readWriteByte()
			return nil
		}
	}
}
// comment discards bytes up to the next line break; the newline itself is
// kept so line numbers stay stable.
func (t *trimmer) comment() error {
	for {
		c, err := t.input.ReadByte()
		switch {
		case err != nil:
			return err
		case c == '\n':
			// Preserve the line break terminating the comment
			return t.output.WriteByte(c)
		}
	}
}
// substituteEnvironment replaces environment-variable references in the given
// configuration contents using docker-compose style templating (including
// default and error fallbacks such as ${VAR:-default}). Undeclared variables
// and invalid template patterns (e.g. regexp references like ${1}) are left
// untouched. With oldReplacementBehavior set, the pre-v1.27 variable pattern
// (oldVarRe) is used instead of the default one.
func substituteEnvironment(contents []byte, oldReplacementBehavior bool) ([]byte, error) {
	options := []template.Option{
		template.WithReplacementFunction(func(s string, m template.Mapping, cfg *template.Config) (string, error) {
			result, applied, err := template.DefaultReplacementAppliedFunc(s, m, cfg)
			if err == nil && !applied {
				// Keep undeclared environment-variable patterns to reproduce
				// pre-v1.27 behavior
				return s, nil
			}
			if err != nil && strings.HasPrefix(err.Error(), "Invalid template:") {
				// Keep invalid template patterns to ignore regexp substitutions
				// like ${1}
				return s, nil
			}
			return result, err
		}),
		template.WithoutLogging,
	}
	if oldReplacementBehavior {
		options = append(options, template.WithPattern(oldVarRe))
	}

	// Resolve variables from the process environment; a variable that is set
	// but empty substitutes to the empty string
	envMap := utils.GetAsEqualsMap(os.Environ())
	retVal, err := template.SubstituteWithOptions(string(contents), func(k string) (string, bool) {
		if v, ok := envMap[k]; ok {
			return v, ok
		}
		return "", false
	}, options...)
	return []byte(retVal), err
}

411
config/internal_test.go Normal file
View file

@ -0,0 +1,411 @@
package config
import (
"bytes"
"fmt"
"net/http"
"net/http/httptest"
"os"
"path/filepath"
"testing"
"time"
"github.com/stretchr/testify/require"
)
// TestEnvironmentSubstitution exercises the new-style (v1.27+) variable
// substitution including defaults (:-), unset errors (?), empty-check errors
// (:?) and nested fallback variables.
func TestEnvironmentSubstitution(t *testing.T) {
	tests := []struct {
		name         string
		setEnv       func(*testing.T)
		contents     string
		expected     string
		wantErr      bool
		errSubstring string
	}{
		{
			name: "Legacy with ${} and without {}",
			setEnv: func(t *testing.T) {
				t.Setenv("TEST_ENV1", "VALUE1")
				t.Setenv("TEST_ENV2", "VALUE2")
			},
			contents: "A string with ${TEST_ENV1}, $TEST_ENV2 and $TEST_ENV1 as repeated",
			expected: "A string with VALUE1, VALUE2 and VALUE1 as repeated",
		},
		{
			// Undeclared variables are kept as-is
			name:     "Env not set",
			contents: "Env variable ${NOT_SET} will be empty",
			expected: "Env variable ${NOT_SET} will be empty",
		},
		{
			name:     "Env not set, fallback to default",
			contents: "Env variable ${THIS_IS_ABSENT:-Fallback}",
			expected: "Env variable Fallback",
		},
		{
			name: "No fallback",
			setEnv: func(t *testing.T) {
				t.Setenv("MY_ENV1", "VALUE1")
			},
			contents: "Env variable ${MY_ENV1:-Fallback}",
			expected: "Env variable VALUE1",
		},
		{
			name: "Mix and match",
			setEnv: func(t *testing.T) {
				t.Setenv("MY_VAR", "VALUE")
				t.Setenv("MY_VAR2", "VALUE2")
			},
			contents: "Env var ${MY_VAR} is set, with $MY_VAR syntax and default on this ${MY_VAR1:-Substituted}, no default on this ${MY_VAR2:-NoDefault}",
			expected: "Env var VALUE is set, with VALUE syntax and default on this Substituted, no default on this VALUE2",
		},
		{
			name: "empty but set",
			setEnv: func(t *testing.T) {
				t.Setenv("EMPTY", "")
			},
			contents: "Contains ${EMPTY} nothing",
			expected: "Contains nothing",
		},
		{
			name:     "Default has special chars",
			contents: `Not recommended but supported ${MY_VAR:-Default with special chars Supported#$\"}`,
			expected: `Not recommended but supported Default with special chars Supported#$\"`, // values are escaped
		},
		{
			name:         "unset error",
			contents:     "Contains ${THIS_IS_NOT_SET?unset-error}",
			wantErr:      true,
			errSubstring: "unset-error",
		},
		{
			name: "env empty error",
			setEnv: func(t *testing.T) {
				t.Setenv("ENV_EMPTY", "")
			},
			contents:     "Contains ${ENV_EMPTY:?empty-error}",
			wantErr:      true,
			errSubstring: "empty-error",
		},
		{
			// Fallbacks may themselves reference environment variables
			name: "Fallback as env variable",
			setEnv: func(t *testing.T) {
				t.Setenv("FALLBACK", "my-fallback")
			},
			contents: "Should output ${NOT_SET:-${FALLBACK}}",
			expected: "Should output my-fallback",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			if tt.setEnv != nil {
				tt.setEnv(t)
			}
			actual, err := substituteEnvironment([]byte(tt.contents), false)
			if tt.wantErr {
				require.ErrorContains(t, err, tt.errSubstring)
				return
			}
			require.EqualValues(t, tt.expected, string(actual))
		})
	}
}
// TestEnvironmentSubstitutionOldBehavior exercises variable substitution with
// the pre-v1.27 variable pattern (oldReplacementBehavior=true), in particular
// the handling of double-dollar and backslash-prefixed references.
func TestEnvironmentSubstitutionOldBehavior(t *testing.T) {
	tests := []struct {
		name     string
		contents string
		expected string
	}{
		{
			name:     "not defined no brackets",
			contents: `my-da$tabase`,
			expected: `my-da$tabase`,
		},
		{
			name:     "not defined brackets",
			contents: `my-da${ta}base`,
			expected: `my-da${ta}base`,
		},
		{
			name:     "not defined no brackets double dollar",
			contents: `my-da$$tabase`,
			expected: `my-da$$tabase`,
		},
		{
			name:     "not defined no brackets backslash",
			contents: `my-da\$tabase`,
			expected: `my-da\$tabase`,
		},
		{
			name:     "not defined brackets backslash",
			contents: `my-da\${ta}base`,
			expected: `my-da\${ta}base`,
		},
		{
			name:     "no brackets and suffix",
			contents: `my-da$VARbase`,
			expected: `my-da$VARbase`,
		},
		{
			name:     "no brackets",
			contents: `my-da$VAR`,
			expected: `my-dafoobar`,
		},
		{
			name:     "brackets",
			contents: `my-da${VAR}base`,
			expected: `my-dafoobarbase`,
		},
		{
			// NOTE: with the old pattern a double dollar does NOT escape
			name:     "no brackets double dollar",
			contents: `my-da$$VAR`,
			expected: `my-da$foobar`,
		},
		{
			name:     "brackets double dollar",
			contents: `my-da$${VAR}`,
			expected: `my-da$foobar`,
		},
		{
			name:     "no brackets backslash",
			contents: `my-da\$VAR`,
			expected: `my-da\foobar`,
		},
		{
			name:     "brackets backslash",
			contents: `my-da\${VAR}base`,
			expected: `my-da\foobarbase`,
		},
		{
			name:     "fallback",
			contents: `my-da${ta:-omg}base`,
			expected: `my-daomgbase`,
		},
		{
			name:     "fallback env",
			contents: `my-da${ta:-${FALLBACK}}base`,
			expected: `my-dadefaultbase`,
		},
		{
			// Regexp references must not be treated as variables
			name:     "regex substitution",
			contents: `${1}`,
			expected: `${1}`,
		},
		{
			name:     "empty but set",
			contents: "Contains ${EMPTY} nothing",
			expected: "Contains nothing",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			t.Setenv("VAR", "foobar")
			t.Setenv("FALLBACK", "default")
			t.Setenv("EMPTY", "")
			actual, err := substituteEnvironment([]byte(tt.contents), true)
			require.NoError(t, err)
			require.EqualValues(t, tt.expected, string(actual))
		})
	}
}
// TestEnvironmentSubstitutionNewBehavior exercises variable substitution with
// the default (v1.27+) pattern where a double dollar escapes the reference.
func TestEnvironmentSubstitutionNewBehavior(t *testing.T) {
	tests := []struct {
		name     string
		contents string
		expected string
	}{
		{
			name:     "not defined no brackets",
			contents: `my-da$tabase`,
			expected: `my-da$tabase`,
		},
		{
			name:     "not defined brackets",
			contents: `my-da${ta}base`,
			expected: `my-da${ta}base`,
		},
		{
			// A double dollar escapes to a single dollar
			name:     "not defined no brackets double dollar",
			contents: `my-da$$tabase`,
			expected: `my-da$tabase`,
		},
		{
			name:     "not defined no brackets backslash",
			contents: `my-da\$tabase`,
			expected: `my-da\$tabase`,
		},
		{
			name:     "not defined brackets backslash",
			contents: `my-da\${ta}base`,
			expected: `my-da\${ta}base`,
		},
		{
			name:     "no brackets and suffix",
			contents: `my-da$VARbase`,
			expected: `my-da$VARbase`,
		},
		{
			name:     "no brackets",
			contents: `my-da$VAR`,
			expected: `my-dafoobar`,
		},
		{
			name:     "brackets",
			contents: `my-da${VAR}base`,
			expected: `my-dafoobarbase`,
		},
		{
			// Escaped references are not substituted
			name:     "no brackets double dollar",
			contents: `my-da$$VAR`,
			expected: `my-da$VAR`,
		},
		{
			name:     "brackets double dollar",
			contents: `my-da$${VAR}`,
			expected: `my-da${VAR}`,
		},
		{
			name:     "no brackets backslash",
			contents: `my-da\$VAR`,
			expected: `my-da\foobar`,
		},
		{
			name:     "brackets backslash",
			contents: `my-da\${VAR}base`,
			expected: `my-da\foobarbase`,
		},
		{
			name:     "fallback",
			contents: `my-da${ta:-omg}base`,
			expected: `my-daomgbase`,
		},
		{
			name:     "fallback env",
			contents: `my-da${ta:-${FALLBACK}}base`,
			expected: `my-dadefaultbase`,
		},
		{
			// Regexp references must not be treated as variables
			name:     "regex substitution",
			contents: `${1}`,
			expected: `${1}`,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			t.Setenv("VAR", "foobar")
			t.Setenv("FALLBACK", "default")
			actual, err := substituteEnvironment([]byte(tt.contents), false)
			require.NoError(t, err)
			require.EqualValues(t, tt.expected, string(actual))
		})
	}
}
// TestParseConfig checks environment substitution and comment handling when
// parsing raw configuration contents, including the regression case of a '#'
// inside a quoted command string (issue #13643).
func TestParseConfig(t *testing.T) {
	tests := []struct {
		name     string
		setEnv   func(*testing.T)
		contents string
		expected string
		errmsg   string
	}{
		{
			name: "empty var name",
			contents: `
# Environment variables can be used anywhere in this config file, simply surround
# them with ${}. For strings the variable must be within quotes (ie, "${STR_VAR}"),
# for numbers and booleans they should be plain (ie, ${INT_VAR}, ${BOOL_VAR})Should output ${NOT_SET:-${FALLBACK}}
`,
			expected: "\n\n\n\n",
		},
		{
			// The '#' inside the quoted command must not start a comment
			name: "comment in command (issue #13643)",
			contents: `
[[inputs.exec]]
commands = ["echo \"abc#def\""]
`,
			expected: `
[[inputs.exec]]
commands = ["echo \"abc#def\""]
`,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			if tt.setEnv != nil {
				tt.setEnv(t)
			}
			tbl, err := parseConfig([]byte(tt.contents))
			if tt.errmsg != "" {
				require.ErrorContains(t, err, tt.errmsg)
				return
			}
			require.NoError(t, err)
			if len(tt.expected) > 0 {
				require.EqualValues(t, tt.expected, string(tbl.Data))
			}
		})
	}
}
// TestRemoveComments verifies the comment stripping against a golden file,
// ignoring trailing whitespace on each line.
func TestRemoveComments(t *testing.T) {
	// Read expectation
	expected, err := os.ReadFile(filepath.Join("testdata", "envvar_comments_expected.toml"))
	require.NoError(t, err)

	// Read the input file and strip the comments
	input, err := os.ReadFile(filepath.Join("testdata", "envvar_comments.toml"))
	require.NoError(t, err)
	stripped, err := removeComments(input)
	require.NoError(t, err)

	// Normalize trailing whitespace per line before comparing
	lines := bytes.Split(stripped, []byte{'\n'})
	for i := range lines {
		lines[i] = bytes.TrimRight(lines[i], " \t")
	}
	actual := bytes.Join(lines, []byte{'\n'})

	// Do the comparison
	require.Equal(t, string(expected), string(actual))
}
// TestURLRetries3Fails checks that loading a config from a URL is retried
// three times (four requests total) before giving up with an error.
func TestURLRetries3Fails(t *testing.T) {
	httpLoadConfigRetryInterval = 0 * time.Second

	var hits int
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) {
		w.WriteHeader(http.StatusNotFound)
		hits++
	}))
	defer ts.Close()

	c := NewConfig()
	err := c.LoadConfig(ts.URL)
	require.Error(t, err)
	require.Equal(
		t,
		fmt.Sprintf("loading config file %s failed: failed to fetch HTTP config: 404 Not Found", ts.URL),
		err.Error(),
	)
	require.Equal(t, 4, hits)
}
// TestURLRetries3FailsThenPasses checks that loading a config from a URL
// succeeds once the server recovers on the fourth request.
func TestURLRetries3FailsThenPasses(t *testing.T) {
	httpLoadConfigRetryInterval = 0 * time.Second

	var hits int
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) {
		// Fail the first three requests, then succeed
		status := http.StatusOK
		if hits <= 2 {
			status = http.StatusNotFound
		}
		w.WriteHeader(status)
		hits++
	}))
	defer ts.Close()

	c := NewConfig()
	require.NoError(t, c.LoadConfig(ts.URL))
	require.Equal(t, 4, hits)
}

261
config/migration.go Normal file
View file

@ -0,0 +1,261 @@
package config
import (
"bufio"
"bytes"
"errors"
"fmt"
"io"
"log"
"sort"
"strings"
"github.com/influxdata/toml"
"github.com/influxdata/toml/ast"
"github.com/influxdata/telegraf/migrations"
_ "github.com/influxdata/telegraf/migrations/all" // register all migrations
)
// section represents a single TOML section of the configuration file together
// with its raw textual representation.
type section struct {
	// name of the section, e.g. "inputs.cpu" for plugins or the bare table name
	name string
	// begin is the line number the section starts on in the original file
	begin int
	// content is the parsed TOML table of the section
	content *ast.Table
	// raw collects the original text belonging to this section
	raw *bytes.Buffer
}
// splitToSections converts the parsed TOML document into a flat list of
// sections, one per plugin instance of the known categories and one per
// remaining top-level table, sorted by the line each section starts on.
func splitToSections(root *ast.Table) ([]section, error) {
	var sections []section
	for name, elements := range root.Fields {
		switch name {
		case "inputs", "outputs", "processors", "aggregators":
			category, ok := elements.(*ast.Table)
			if !ok {
				// Report the actual type of the element; the previous code
				// passed the nil assertion target, always printing *ast.Table
				return nil, fmt.Errorf("%q is not a table (%T)", name, elements)
			}

			// Each plugin name maps to a list of instances of that plugin
			for plugin, elements := range category.Fields {
				tbls, ok := elements.([]*ast.Table)
				if !ok {
					return nil, fmt.Errorf("elements of \"%s.%s\" is not a list of tables (%T)", name, plugin, elements)
				}
				for _, tbl := range tbls {
					sections = append(sections, section{
						name:    name + "." + tbl.Name,
						begin:   tbl.Line,
						content: tbl,
						raw:     &bytes.Buffer{},
					})
				}
			}
		default:
			tbl, ok := elements.(*ast.Table)
			if !ok {
				return nil, fmt.Errorf("%q is not a table (%T)", name, elements)
			}
			sections = append(sections, section{
				name:    name,
				begin:   tbl.Line,
				content: tbl,
				raw:     &bytes.Buffer{},
			})
		}
	}

	// Sort the TOML elements by begin (line-number)
	sort.SliceStable(sections, func(i, j int) bool { return sections[i].begin < sections[j].begin })

	return sections, nil
}
// assignTextToSections attributes the raw configuration text to the
// sections determined from the parsed AST. Text before the first section
// is collected into an artificial "header" section. Comment lines directly
// preceding a section header are assigned to that following section so
// they stay attached to it when the section is rewritten.
func assignTextToSections(data []byte, sections []section) ([]section, error) {
	// Now assign the raw text to each section
	if sections[0].begin > 0 {
		sections = append([]section{{
			name:  "header",
			begin: 0,
			raw:   &bytes.Buffer{},
		}}, sections...)
	}

	var lineno int
	scanner := bufio.NewScanner(bytes.NewBuffer(data))
	for idx, next := range sections[1:] {
		// Buffer for comment lines whose owner (current or next section)
		// is not yet known.
		var buf bytes.Buffer
		for lineno < next.begin-1 {
			if !scanner.Scan() {
				break
			}
			lineno++

			line := strings.TrimSpace(scanner.Text())
			if strings.HasPrefix(line, "#") {
				// Hold back comment lines until we know what follows them.
				buf.Write(scanner.Bytes())
				buf.WriteString("\n")
				continue
			} else if buf.Len() > 0 {
				// A non-comment line follows, so the buffered comments
				// belong to the current section.
				if _, err := io.Copy(sections[idx].raw, &buf); err != nil {
					return nil, fmt.Errorf("copying buffer failed: %w", err)
				}
				buf.Reset()
			}
			sections[idx].raw.Write(scanner.Bytes())
			sections[idx].raw.WriteString("\n")
		}
		if err := scanner.Err(); err != nil {
			return nil, fmt.Errorf("splitting by line failed: %w", err)
		}

		// If a comment is directly in front of the next section, without
		// newline, the comment is assigned to the next section.
		if buf.Len() > 0 {
			if _, err := io.Copy(sections[idx+1].raw, &buf); err != nil {
				return nil, fmt.Errorf("copying buffer failed: %w", err)
			}
			buf.Reset()
		}
	}
	// Write the remaining to the last section
	for scanner.Scan() {
		sections[len(sections)-1].raw.Write(scanner.Bytes())
		sections[len(sections)-1].raw.WriteString("\n")
	}
	if err := scanner.Err(); err != nil {
		return nil, fmt.Errorf("splitting by line failed: %w", err)
	}

	return sections, nil
}
// ApplyMigrations applies all registered configuration migrations to the
// given raw TOML configuration. Migrations run in four stages: global
// section migrations, whole-plugin migrations, plugin-option migrations
// and general migrations applying to all plugins. It returns the migrated
// configuration text together with the number of applied migrations.
func ApplyMigrations(data []byte) ([]byte, uint64, error) {
	root, err := toml.Parse(data)
	if err != nil {
		return nil, 0, fmt.Errorf("parsing failed: %w", err)
	}

	// Split the configuration into sections containing the location
	// in the file.
	sections, err := splitToSections(root)
	if err != nil {
		return nil, 0, fmt.Errorf("splitting to sections failed: %w", err)
	}
	if len(sections) == 0 {
		return nil, 0, errors.New("no TOML configuration found")
	}

	// Assign the configuration text to the corresponding segments
	sections, err = assignTextToSections(data, sections)
	if err != nil {
		return nil, 0, fmt.Errorf("assigning text failed: %w", err)
	}

	var applied uint64
	// Do the actual global section migration(s)
	for idx, s := range sections {
		// Plugin sections are named "<category>.<plugin>"; skip them here.
		if strings.Contains(s.name, ".") {
			continue
		}
		log.Printf("D! applying global migrations to section %q in line %d...", s.name, s.begin)
		for _, migrate := range migrations.GlobalMigrations {
			result, msg, err := migrate(s.name, s.content)
			if err != nil {
				// Migrations not applicable to this section are no error.
				if errors.Is(err, migrations.ErrNotApplicable) {
					continue
				}
				return nil, 0, fmt.Errorf("migrating options of %q (line %d) failed: %w", s.name, s.begin, err)
			}
			if msg != "" {
				log.Printf("I! Global section %q in line %d: %s", s.name, s.begin, msg)
			}
			s.raw = bytes.NewBuffer(result)
			applied++
		}
		sections[idx] = s
	}

	// Do the actual plugin migration(s)
	for idx, s := range sections {
		migrate, found := migrations.PluginMigrations[s.name]
		if !found {
			continue
		}
		log.Printf("D! migrating plugin %q in line %d...", s.name, s.begin)
		result, msg, err := migrate(s.content)
		if err != nil {
			return nil, 0, fmt.Errorf("migrating %q (line %d) failed: %w", s.name, s.begin, err)
		}
		if msg != "" {
			log.Printf("I! Plugin %q in line %d: %s", s.name, s.begin, msg)
		}
		s.raw = bytes.NewBuffer(result)
		// Re-parse the migrated text so the later stages operate on the
		// up-to-date AST of this section.
		tbl, err := toml.Parse(s.raw.Bytes())
		if err != nil {
			return nil, 0, fmt.Errorf("reparsing migrated %q (line %d) failed: %w", s.name, s.begin, err)
		}
		s.content = tbl
		sections[idx] = s
		applied++
	}

	// Do the actual plugin option migration(s)
	for idx, s := range sections {
		migrate, found := migrations.PluginOptionMigrations[s.name]
		if !found {
			continue
		}
		log.Printf("D! migrating options of plugin %q in line %d...", s.name, s.begin)
		result, msg, err := migrate(s.content)
		if err != nil {
			if errors.Is(err, migrations.ErrNotApplicable) {
				continue
			}
			return nil, 0, fmt.Errorf("migrating options of %q (line %d) failed: %w", s.name, s.begin, err)
		}
		if msg != "" {
			log.Printf("I! Plugin %q in line %d: %s", s.name, s.begin, msg)
		}
		s.raw = bytes.NewBuffer(result)
		sections[idx] = s
		applied++
	}

	// Do general migrations applying to all plugins
	for idx, s := range sections {
		// Only sections named "<category>.<plugin>" are plugin instances.
		parts := strings.Split(s.name, ".")
		if len(parts) != 2 {
			continue
		}
		log.Printf("D! applying general migrations to plugin %q in line %d...", s.name, s.begin)
		category, name := parts[0], parts[1]
		for _, migrate := range migrations.GeneralMigrations {
			result, msg, err := migrate(category, name, s.content)
			if err != nil {
				if errors.Is(err, migrations.ErrNotApplicable) {
					continue
				}
				return nil, 0, fmt.Errorf("migrating options of %q (line %d) failed: %w", s.name, s.begin, err)
			}
			if msg != "" {
				log.Printf("I! Plugin %q in line %d: %s", s.name, s.begin, msg)
			}
			s.raw = bytes.NewBuffer(result)
			applied++
		}
		sections[idx] = s
	}

	// Reconstruct the config file from the sections
	var buf bytes.Buffer
	for _, s := range sections {
		_, err = s.raw.WriteTo(&buf)
		if err != nil {
			return nil, applied, fmt.Errorf("joining output failed: %w", err)
		}
	}

	return buf.Bytes(), applied, nil
}

79
config/plugin_id.go Normal file
View file

@ -0,0 +1,79 @@
package config
import (
"crypto/sha256"
"encoding/hex"
"fmt"
"sort"
"github.com/influxdata/toml/ast"
)
// keyValuePair is a flattened plugin-configuration option holding the
// canonical (dotted) key and the raw TOML source of its value.
type keyValuePair struct {
	Key   string
	Value string
}
// processTable recursively flattens a TOML table into a list of key/value
// pairs. Nested tables are prefixed with their parent key separated by
// dots, and elements of table arrays are additionally disambiguated with a
// "#<index>" marker. Values keep their raw TOML source representation.
func processTable(parent string, table *ast.Table) ([]keyValuePair, error) {
	var prefix string
	var options []keyValuePair

	if parent != "" {
		prefix = parent + "."
	}

	for k, value := range table.Fields {
		switch v := value.(type) {
		case *ast.KeyValue:
			// Plain option; record the raw TOML source of the value.
			key := prefix + k
			options = append(options, keyValuePair{
				Key:   key,
				Value: v.Value.Source(),
			})
		case *ast.Table:
			// Nested table; recurse with the extended prefix.
			key := prefix + k
			children, err := processTable(key, v)
			if err != nil {
				return nil, fmt.Errorf("parsing table for %q failed: %w", key, err)
			}
			options = append(options, children...)
		case []*ast.Table:
			// Array of tables; include the element index in the key to
			// keep entries distinguishable.
			for i, t := range v {
				key := fmt.Sprintf("%s#%d.%s", prefix, i, k)
				children, err := processTable(key, t)
				if err != nil {
					return nil, fmt.Errorf("parsing table for %q #%d failed: %w", key, i, err)
				}
				options = append(options, children...)
			}
		default:
			return nil, fmt.Errorf("unknown node type %T in key %q", value, prefix+k)
		}
	}
	return options, nil
}
// generatePluginID computes a stable, order-independent identifier for a
// plugin instance from its configuration. Identically configured plugins
// always receive the same ID, regardless of the order in which options
// appear (Golang gives no guarantee on map iteration order), because the
// flattened options are sorted by their canonical key before hashing. The
// ID is additionally bound to the given prefix (the plugin name) to avoid
// collisions between different plugin types.
func generatePluginID(prefix string, table *ast.Table) (string, error) {
	// Flatten the (possibly nested) configuration into key/value pairs.
	options, err := processTable("", table)
	if err != nil {
		return "", fmt.Errorf("processing AST failed: %w", err)
	}
	sort.SliceStable(options, func(i, j int) bool { return options[i].Key < options[j].Key })

	// Hash the prefix and all options, each terminated by a NUL byte to
	// keep the fields unambiguous.
	h := sha256.New()
	h.Write(append([]byte(prefix), 0))
	for _, o := range options {
		h.Write([]byte(o.Key + ":" + o.Value))
		h.Write([]byte{0})
	}

	return hex.EncodeToString(h.Sum(nil)), nil
}

106
config/plugin_printer.go Normal file
View file

@ -0,0 +1,106 @@
package config
import (
"bytes"
"fmt"
"sort"
"strings"
"github.com/jedib0t/go-pretty/v6/table"
)
// headers are the column titles of the plugin-source overview table.
var headers = []string{"Name", "Source(s)"}

// pluginPrinter pairs a plugin name with the configuration source it was
// loaded from.
type pluginPrinter struct {
	name   string
	source string
}

// pluginNames is a list of plugin name/source pairs.
type pluginNames []pluginPrinter
// getPluginSourcesTable renders a table listing each plugin name together
// with the configuration source(s) it was loaded from. Plugins appearing
// multiple times are annotated with their count and the rows are ordered
// by descending number of sources. An empty string is returned when
// source printing is disabled or no plugins are given.
func getPluginSourcesTable(pluginNames []pluginPrinter) string {
	if !PrintPluginConfigSource || len(pluginNames) == 0 {
		return ""
	}

	// Group all sources by plugin name.
	sourcesByName := make(map[string][]string)
	for _, p := range pluginNames {
		sourcesByName[p.name] = append(sourcesByName[p.name], p.source)
	}

	// Build one row per plugin, annotating names occurring more than once
	// with their usage count.
	rows := make([][]any, 0, len(sourcesByName))
	for name, sources := range sourcesByName {
		label := name
		if len(sources) > 1 {
			label = fmt.Sprintf("%s (%dx)", name, len(sources))
		}
		rows = append(rows, []any{label, sources})
	}

	// Show the most-used plugins first.
	sort.Slice(rows, func(i, j int) bool {
		return len(rows[i][1].([]string)) > len(rows[j][1].([]string))
	})
	return getTableString(headers, rows)
}
// getTableString renders the given header and rows as a text table.
// Columns holding a []string are collapsed into a multi-line cell of
// unique entries sorted by descending occurrence count; duplicated entries
// are annotated with their count. All other column types are rendered
// as-is.
func getTableString(headers []string, data [][]any) string {
	var buf bytes.Buffer
	w := table.NewWriter()
	w.SetOutputMirror(&buf)
	w.AppendHeader(convertToRow(headers))

	for _, row := range data {
		cells := make([]any, len(row))
		for i, col := range row {
			strs, ok := col.([]string)
			if !ok {
				cells[i] = col
				continue
			}

			// Count the occurrences of each entry.
			counts := make(map[string]int, len(strs))
			for _, s := range strs {
				counts[s]++
			}

			// Sort the unique entries by descending count and annotate
			// duplicates with their number of occurrences.
			unique := make([]string, 0, len(counts))
			for s := range counts {
				unique = append(unique, s)
			}
			sort.Slice(unique, func(a, b int) bool {
				return counts[unique[a]] > counts[unique[b]]
			})
			for j, s := range unique {
				if counts[s] > 1 {
					unique[j] = fmt.Sprintf("%s (%dx)", s, counts[s])
				}
			}
			cells[i] = strings.Join(unique, "\n")
		}
		w.AppendRow(cells)
	}
	w.Style().Options.SeparateRows = true
	return w.Render()
}
// convertToRow converts a plain string slice into a table.Row so it can be
// passed to the table writer.
func convertToRow(data []string) table.Row {
	row := make(table.Row, 0, len(data))
	for _, val := range data {
		row = append(row, val)
	}
	return row
}

315
config/secret.go Normal file
View file

@ -0,0 +1,315 @@
package config
import (
"fmt"
"log"
"regexp"
"strings"
"sync/atomic"
"github.com/influxdata/telegraf"
)
// unlinkedSecrets contains the list of secrets that contain
// references not yet linked to their corresponding secret-store.
// Those secrets must later (after reading the config) be linked
// by the config to their respective secret-stores.
// Secrets containing constant strings will not be found in this
// list.
var unlinkedSecrets = make([]*Secret, 0)

// secretStorePattern is a regex to validate secret-store IDs
var secretStorePattern = regexp.MustCompile(`^\w+$`)

// secretPattern is a regex to extract references to secrets stored in a
// secret-store, i.e. "@{store:key}" tokens
var secretPattern = regexp.MustCompile(`@\{(\w+:\w+)\}`)

// secretCandidatePattern is a regex to find secret candidates to warn users on invalid characters in references
var secretCandidatePattern = regexp.MustCompile(`@\{.+?:.+?}`)

// secretCount is the number of secrets used in Telegraf
var secretCount atomic.Int64

// selectedImpl is the configured implementation for secrets; it defaults
// to the protected implementation
var selectedImpl secretImpl = &protectedSecretImpl{}
// secretImpl represents an abstraction for different implementations of
// secrets, e.g. with or without memory protection
type secretImpl interface {
	// Container wraps the given secret bytes in a secretContainer.
	Container(secret []byte) secretContainer
	// EmptyBuffer returns a new, empty SecretBuffer.
	EmptyBuffer() SecretBuffer
	// Wipe overwrites the given bytes in memory.
	Wipe(secret []byte)
}
// EnableSecretProtection selects the protected secret implementation
// (the default).
func EnableSecretProtection() {
	selectedImpl = &protectedSecretImpl{}
}

// DisableSecretProtection selects the plain secret implementation without
// memory protection.
func DisableSecretProtection() {
	selectedImpl = &unprotectedSecretImpl{}
}
// secretContainer represents an abstraction of the container holding the
// actual secret value
type secretContainer interface {
	// Destroy wipes and invalidates the stored secret.
	Destroy()
	// Equals compares the stored secret to the given reference.
	Equals(ref []byte) (bool, error)
	// Buffer returns the stored secret wrapped in a SecretBuffer.
	Buffer() (SecretBuffer, error)
	// AsBuffer wraps the given plain-text secret in a SecretBuffer.
	AsBuffer(secret []byte) SecretBuffer
	// Replace overwrites the stored secret with the given value.
	Replace(secret []byte)
}
// SecretBuffer allows to access the content of the secret
type SecretBuffer interface {
	// Size returns the length of the buffer content
	Size() int
	// Grow will grow the capacity of the underlying buffer to the given size
	Grow(capacity int)
	// Bytes returns the content of the buffer as bytes.
	// NOTE: The returned bytes shall NOT be accessed after destroying the
	// buffer using 'Destroy()' as the underlying memory area might be
	// wiped and invalid.
	Bytes() []byte
	// TemporaryString returns the content of the buffer as a string.
	// NOTE: The returned String shall NOT be accessed after destroying the
	// buffer using 'Destroy()' as the underlying memory area might be
	// wiped and invalid.
	TemporaryString() string
	// String returns a copy of the underlying buffer's content as string.
	// It is safe to use the returned value after destroying the buffer.
	String() string
	// Destroy will wipe the buffer's content and destroy the underlying
	// buffer. Do not access the buffer after destroying it.
	Destroy()
}
// Secret safely stores sensitive data such as a password or token
type Secret struct {
	// container is the implementation for holding the secret. It can be
	// protected or not depending on the concrete implementation.
	container secretContainer
	// resolvers are the functions for resolving a given secret-id (key)
	resolvers map[string]telegraf.ResolveFunc
	// unlinked contains all references in the secret that are not yet
	// linked to the corresponding secret store.
	unlinked []string
	// notempty denotes if the secret is completely empty
	notempty bool
}
// NewSecret creates a new secret holding the given value.
func NewSecret(b []byte) Secret {
	var s Secret
	s.init(b)
	return s
}
// UnmarshalText creates a secret from a TOML value following the "string"
// rule. Secrets containing references to secret-stores are additionally
// registered globally so the config can link them later.
func (s *Secret) UnmarshalText(b []byte) error {
	// Unmarshal secret from TOML and put it into protected memory
	s.init(b)

	// Keep track of secrets that contain references to secret-stores
	// for later resolving by the config.
	if s.notempty && len(s.unlinked) > 0 {
		unlinkedSecrets = append(unlinkedSecrets, s)
	}

	return nil
}
// init initializes the secret content from the given bytes: it records the
// valid secret-store references contained in the value, warns about
// malformed ones, and stores the value in the currently selected secret
// container implementation.
func (s *Secret) init(secret []byte) {
	// Keep track of the number of secrets...
	secretCount.Add(1)

	// Remember if the secret is completely empty
	s.notempty = len(secret) != 0

	// Find all secret candidates and check if they are really a valid
	// reference. Otherwise issue a warning to let the user know that there is
	// a potential issue with their secret instead of silently ignoring it.
	candidates := secretCandidatePattern.FindAllString(string(secret), -1)
	s.unlinked = make([]string, 0, len(candidates))
	for _, c := range candidates {
		if secretPattern.MatchString(c) {
			s.unlinked = append(s.unlinked, c)
		} else {
			log.Printf("W! Secret %q contains invalid character(s), only letters, digits and underscores are allowed.", c)
		}
	}
	s.resolvers = nil

	// Setup the container implementation
	s.container = selectedImpl.Container(secret)
}
// Destroy wipes the secret's content and releases all associated state.
// Destroying an already-destroyed secret is a no-op.
func (s *Secret) Destroy() {
	s.resolvers = nil
	s.unlinked = nil
	s.notempty = false

	if s.container == nil {
		return
	}
	s.container.Destroy()
	s.container = nil

	// Keep track of the number of used secrets...
	secretCount.Add(-1)
}
// Empty returns true if the secret is completely empty, i.e. it was
// created from zero-length content.
func (s *Secret) Empty() bool {
	return !s.notempty
}
// EqualTo performs a constant-time comparison of the secret to the given
// reference value. A destroyed or uninitialized secret never matches; an
// error is returned when the secret still contains unlinked references.
func (s *Secret) EqualTo(ref []byte) (bool, error) {
	if s.container == nil {
		return false, nil
	}

	// Comparing is only meaningful once all references are linked.
	if len(s.unlinked) != 0 {
		return false, fmt.Errorf("unlinked parts in secret: %v", strings.Join(s.unlinked, ";"))
	}

	return s.container.Equals(ref)
}
// Get returns the resolved content of the secret wrapped in a
// SecretBuffer. The caller must destroy the returned buffer after use.
// An error is returned when the secret still contains unlinked references
// or a dynamic resolver fails.
func (s *Secret) Get() (SecretBuffer, error) {
	if s.container == nil {
		return selectedImpl.EmptyBuffer(), nil
	}

	if len(s.unlinked) > 0 {
		return nil, fmt.Errorf("unlinked parts in secret: %v", strings.Join(s.unlinked, ";"))
	}

	// Decrypt the secret so we can return it
	buffer, err := s.container.Buffer()
	if err != nil {
		return nil, err
	}

	// We've got a static secret so simply return the buffer
	if len(s.resolvers) == 0 {
		return buffer, nil
	}
	defer buffer.Destroy()

	// Resolve all remaining dynamic references using the linked resolvers.
	replaceErrs := make([]string, 0)
	newsecret := secretPattern.ReplaceAllFunc(buffer.Bytes(), func(match []byte) []byte {
		resolver, found := s.resolvers[string(match)]
		if !found {
			replaceErrs = append(replaceErrs, fmt.Sprintf("no resolver for %q", match))
			return match
		}
		replacement, _, err := resolver()
		if err != nil {
			replaceErrs = append(replaceErrs, fmt.Sprintf("resolving %q failed: %v", match, err))
			return match
		}

		return replacement
	})
	if len(replaceErrs) > 0 {
		// Wipe the partially-resolved value before reporting the failure.
		selectedImpl.Wipe(newsecret)
		return nil, fmt.Errorf("replacing secrets failed: %s", strings.Join(replaceErrs, ";"))
	}

	return s.container.AsBuffer(newsecret), nil
}
// Set overwrites the secret's value with a new one. Please note, the secret
// is not linked again, so only references to secret-stores can be used, e.g. by
// adding more clear-text or reordering secrets.
func (s *Secret) Set(value []byte) error {
	// Resolve the new value against the already-linked resolvers to make
	// sure all references it contains can actually be resolved.
	secret, res, replaceErrs := resolve(value, s.resolvers)
	if len(replaceErrs) > 0 {
		return fmt.Errorf("linking new secrets failed: %s", strings.Join(replaceErrs, ";"))
	}

	// Set the new secret and keep the resolvers for the dynamic parts.
	s.container.Replace(secret)
	s.resolvers = res
	s.notempty = len(value) > 0

	return nil
}
// GetUnlinked returns the parts of the secret that are not yet linked to a
// resolver.
func (s *Secret) GetUnlinked() []string {
	return s.unlinked
}
// Link uses the given resolver map to link the secret parts to their
// secret-store resolvers. Static references are replaced immediately while
// resolvers for dynamic references are kept for resolution at Get() time.
func (s *Secret) Link(resolvers map[string]telegraf.ResolveFunc) error {
	// Decrypt the secret so we can work on its content
	if s.container == nil {
		return nil
	}
	buffer, err := s.container.Buffer()
	if err != nil {
		return err
	}
	defer buffer.Destroy()

	// Iterate through the parts and try to resolve them. For static parts
	// we directly replace them, while for dynamic ones we store the resolver.
	newsecret, res, replaceErrs := resolve(buffer.Bytes(), resolvers)
	if len(replaceErrs) > 0 {
		return fmt.Errorf("linking secrets failed: %s", strings.Join(replaceErrs, ";"))
	}
	s.resolvers = res

	// Store the secret if it has changed
	if buffer.TemporaryString() != string(newsecret) {
		s.container.Replace(newsecret)
	}

	// All linked now
	s.unlinked = nil

	return nil
}
// resolve substitutes all secret-store references in the given secret
// using the provided resolver map. Static references are replaced in
// place; resolvers for dynamic references are returned in the second
// result for later use. The third result collects a description for every
// reference that could not be resolved.
func resolve(secret []byte, resolvers map[string]telegraf.ResolveFunc) ([]byte, map[string]telegraf.ResolveFunc, []string) {
	// Iterate through the parts and try to resolve them. For static parts
	// we directly replace them, while for dynamic ones we store the resolver.
	replaceErrs := make([]string, 0)
	remaining := make(map[string]telegraf.ResolveFunc)
	newsecret := secretPattern.ReplaceAllFunc(secret, func(match []byte) []byte {
		resolver, found := resolvers[string(match)]
		if !found {
			replaceErrs = append(replaceErrs, fmt.Sprintf("unlinked part %q", match))
			return match
		}
		replacement, dynamic, err := resolver()
		if err != nil {
			replaceErrs = append(replaceErrs, fmt.Sprintf("resolving %q failed: %v", match, err))
			return match
		}

		// Replace static parts right away
		if !dynamic {
			return replacement
		}

		// Keep the resolver for dynamic secrets
		remaining[string(match)] = resolver
		return match
	})
	return newsecret, remaining, replaceErrs
}
// splitLink splits a secret reference of the form "@{store:key}" into its
// store-ID and key parts. The key may itself contain colons; only the
// first colon separates the two parts.
func splitLink(s string) (storeID, key string) {
	// Strip the leading "@{" and the trailing "}" and split at the first
	// colon. The regular-expression match guarantees both parts exist.
	inner := s[2 : len(s)-1]
	pos := strings.Index(inner, ":")
	return inner[:pos], inner[pos+1:]
}

131
config/secret_protected.go Normal file
View file

@ -0,0 +1,131 @@
package config
import (
"fmt"
"github.com/awnumar/memguard"
)
// protectedSecretImpl is the secret backend that keeps secrets in
// encrypted memory using memguard enclaves.
type protectedSecretImpl struct{}

// Container wraps the given secret bytes in a memguard enclave.
func (*protectedSecretImpl) Container(secret []byte) secretContainer {
	return &protectedSecretContainer{
		enclave: memguard.NewEnclave(secret),
	}
}

// EmptyBuffer returns a new, empty locked buffer.
func (*protectedSecretImpl) EmptyBuffer() SecretBuffer {
	return &lockedBuffer{}
}

// Wipe overwrites the given bytes in memory.
func (*protectedSecretImpl) Wipe(secret []byte) {
	memguard.WipeBytes(secret)
}
// lockedBuffer implements SecretBuffer on top of a memguard locked buffer.
// A nil inner buffer represents an empty secret.
type lockedBuffer struct {
	buf *memguard.LockedBuffer
}

// Size returns the length of the buffer content in bytes.
func (lb *lockedBuffer) Size() int {
	if lb.buf == nil {
		return 0
	}
	return lb.buf.Size()
}
// Grow increases the capacity of the underlying locked buffer to the given
// size. Growing to a capacity smaller than or equal to the current size is
// a no-op. Existing content is copied into the new buffer and the old
// buffer is securely destroyed.
func (lb *lockedBuffer) Grow(capacity int) {
	size := lb.Size()
	if capacity <= size {
		return
	}

	buf := memguard.NewBuffer(capacity)
	if lb.buf != nil {
		buf.Copy(lb.buf.Bytes())
		// Only destroy the old buffer if one exists; calling Destroy on a
		// nil locked buffer would dereference a nil pointer (e.g. when
		// growing a buffer obtained from EmptyBuffer()).
		lb.buf.Destroy()
	}
	lb.buf = buf
}
// Bytes returns the raw content of the buffer. The returned slice must not
// be used after the buffer has been destroyed.
func (lb *lockedBuffer) Bytes() []byte {
	if lb.buf == nil {
		return nil
	}
	return lb.buf.Bytes()
}

// TemporaryString returns the buffer content as a string without copying.
// The string must not be used after the buffer has been destroyed.
func (lb *lockedBuffer) TemporaryString() string {
	if lb.buf == nil {
		return ""
	}
	return lb.buf.String()
}

// String returns an independent copy of the buffer content that remains
// valid after the buffer is destroyed. Note the copy lives in regular,
// unprotected memory.
func (lb *lockedBuffer) String() string {
	if lb.buf == nil {
		return ""
	}
	return string(lb.buf.Bytes())
}

// Destroy wipes and releases the underlying locked buffer. Destroying an
// already-destroyed (or empty) buffer is a no-op.
func (lb *lockedBuffer) Destroy() {
	if lb.buf == nil {
		return
	}
	lb.buf.Destroy()
	lb.buf = nil
}
// protectedSecretContainer stores a secret in a memguard enclave, i.e. in
// encrypted memory. A nil enclave represents a destroyed container.
type protectedSecretContainer struct {
	enclave *memguard.Enclave
}

// Destroy wipes the secret from memory and invalidates the container.
func (c *protectedSecretContainer) Destroy() {
	if c.enclave == nil {
		return
	}

	// Wipe the secret from memory
	lockbuf, err := c.enclave.Open()
	if err == nil {
		lockbuf.Destroy()
	}
	c.enclave = nil
}
// Equals compares the stored secret to the given reference. A destroyed
// container never matches. The plain-text copy used for the comparison is
// destroyed before returning.
func (c *protectedSecretContainer) Equals(ref []byte) (bool, error) {
	if c.enclave == nil {
		return false, nil
	}

	// Get a locked-buffer of the secret to perform the comparison
	lockbuf, err := c.enclave.Open()
	if err != nil {
		return false, fmt.Errorf("opening enclave failed: %w", err)
	}
	defer lockbuf.Destroy()

	return lockbuf.EqualTo(ref), nil
}
// Buffer decrypts the enclave and returns its content as a locked buffer.
// The caller must destroy the returned buffer after use. A destroyed
// container yields an empty buffer.
func (c *protectedSecretContainer) Buffer() (SecretBuffer, error) {
	if c.enclave == nil {
		return &lockedBuffer{}, nil
	}

	// Decrypt the enclave content into a locked buffer
	lockbuf, err := c.enclave.Open()
	if err != nil {
		return nil, fmt.Errorf("opening enclave failed: %w", err)
	}
	return &lockedBuffer{lockbuf}, nil
}

// AsBuffer wraps the given plain-text secret in a locked buffer.
func (*protectedSecretContainer) AsBuffer(secret []byte) SecretBuffer {
	return &lockedBuffer{memguard.NewBufferFromBytes(secret)}
}

// Replace stores the given secret in a new enclave, replacing the
// previous content.
func (c *protectedSecretContainer) Replace(secret []byte) {
	c.enclave = memguard.NewEnclave(secret)
}

845
config/secret_test.go Normal file
View file

@ -0,0 +1,845 @@
package config
import (
"bytes"
"errors"
"fmt"
"log"
"testing"
"github.com/awnumar/memguard"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/influxdata/telegraf"
"github.com/influxdata/telegraf/plugins/inputs"
"github.com/influxdata/telegraf/plugins/secretstores"
)
// TestSecretConstantManually checks that a secret created directly from a
// byte slice can be retrieved unchanged.
func TestSecretConstantManually(t *testing.T) {
	const mysecret = "a wonderful test"
	s := NewSecret([]byte(mysecret))
	defer s.Destroy()

	buf, err := s.Get()
	require.NoError(t, err)
	defer buf.Destroy()
	require.EqualValues(t, mysecret, buf.TemporaryString())
}
func TestLinking(t *testing.T) {
mysecret := "a @{referenced:secret}"
resolvers := map[string]telegraf.ResolveFunc{
"@{referenced:secret}": func() ([]byte, bool, error) {
return []byte("resolved secret"), false, nil
},
}
s := NewSecret([]byte(mysecret))
defer s.Destroy()
require.NoError(t, s.Link(resolvers))
retrieved, err := s.Get()
require.NoError(t, err)
defer retrieved.Destroy()
require.EqualValues(t, "a resolved secret", retrieved.TemporaryString())
}
func TestLinkingResolverError(t *testing.T) {
mysecret := "a @{referenced:secret}"
resolvers := map[string]telegraf.ResolveFunc{
"@{referenced:secret}": func() ([]byte, bool, error) {
return nil, false, errors.New("broken")
},
}
s := NewSecret([]byte(mysecret))
defer s.Destroy()
expected := `linking secrets failed: resolving "@{referenced:secret}" failed: broken`
require.EqualError(t, s.Link(resolvers), expected)
}
// TestGettingUnlinked checks that retrieving a secret with references that
// were never linked fails with a descriptive error.
func TestGettingUnlinked(t *testing.T) {
	s := NewSecret([]byte("a @{referenced:secret}"))
	defer s.Destroy()

	_, err := s.Get()
	require.ErrorContains(t, err, "unlinked parts in secret")
}
// TestGettingMissingResolver checks that Get() fails with a descriptive
// error when a referenced secret has no matching resolver registered.
func TestGettingMissingResolver(t *testing.T) {
	mysecret := "a @{referenced:secret}"
	s := NewSecret([]byte(mysecret))
	defer s.Destroy()

	// Mark the secret as linked but only provide a resolver for an
	// unrelated reference.
	s.unlinked = make([]string, 0)
	s.resolvers = map[string]telegraf.ResolveFunc{
		"@{a:dummy}": func() ([]byte, bool, error) {
			return nil, false, nil
		},
	}
	_, err := s.Get()
	expected := `replacing secrets failed: no resolver for "@{referenced:secret}"`
	require.EqualError(t, err, expected)
}
// TestGettingResolverError checks that resolver failures during Get() are
// reported with the offending reference in the error message.
func TestGettingResolverError(t *testing.T) {
	mysecret := "a @{referenced:secret}"
	s := NewSecret([]byte(mysecret))
	defer s.Destroy()

	// Mark the secret as linked and install a failing resolver.
	s.unlinked = make([]string, 0)
	s.resolvers = map[string]telegraf.ResolveFunc{
		"@{referenced:secret}": func() ([]byte, bool, error) {
			return nil, false, errors.New("broken")
		},
	}
	_, err := s.Get()
	expected := `replacing secrets failed: resolving "@{referenced:secret}" failed: broken`
	require.EqualError(t, err, expected)
}
// TestUninitializedEnclave checks that a zero-value Secret can be linked
// and retrieved without error and yields empty content.
func TestUninitializedEnclave(t *testing.T) {
	var s Secret
	defer s.Destroy()

	require.NoError(t, s.Link(map[string]telegraf.ResolveFunc{}))
	buf, err := s.Get()
	require.NoError(t, err)
	defer buf.Destroy()
	require.Empty(t, buf.Bytes())
}
// TestEnclaveOpenError checks that errors from opening the memguard
// enclave are propagated by both Link() and Get(). memguard.Purge() is
// called so that opening the existing enclave fails afterwards.
func TestEnclaveOpenError(t *testing.T) {
	mysecret := "a @{referenced:secret}"
	s := NewSecret([]byte(mysecret))
	defer s.Destroy()
	memguard.Purge()

	err := s.Link(map[string]telegraf.ResolveFunc{})
	require.ErrorContains(t, err, "opening enclave failed")

	// Pretend linking succeeded so Get() reaches the enclave as well.
	s.unlinked = make([]string, 0)
	_, err = s.Get()
	require.ErrorContains(t, err, "opening enclave failed")
}
// TestMissingResolver checks that linking fails when no resolver is
// provided for a referenced secret.
func TestMissingResolver(t *testing.T) {
	s := NewSecret([]byte("a @{referenced:secret}"))
	defer s.Destroy()

	err := s.Link(map[string]telegraf.ResolveFunc{})
	require.ErrorContains(t, err, "linking secrets failed: unlinked part")
}
// TestSecretConstant checks that constant secrets without store references
// are returned verbatim — including strings containing an "@" such as mail
// addresses, which must not be mistaken for secret-store references.
func TestSecretConstant(t *testing.T) {
	tests := []struct {
		name     string
		cfg      []byte
		expected string
	}{
		{
			name: "simple string",
			cfg: []byte(`
[[inputs.mockup]]
secret = "a secret"
`),
			expected: "a secret",
		},
		{
			name: "mail address",
			cfg: []byte(`
[[inputs.mockup]]
secret = "someone@mock.org"
`),
			expected: "someone@mock.org",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			c := NewConfig()
			require.NoError(t, c.LoadConfigData(tt.cfg, EmptySourcePath))
			require.Len(t, c.Inputs, 1)

			// Create a mockup secretstore; the "fail" value must never be
			// substituted into the constant secrets.
			store := &MockupSecretStore{
				Secrets: map[string][]byte{"mock": []byte("fail")},
			}
			require.NoError(t, store.Init())
			c.SecretStores["mock"] = store
			require.NoError(t, c.LinkSecrets())

			plugin := c.Inputs[0].Input.(*MockupSecretPlugin)
			secret, err := plugin.Secret.Get()
			require.NoError(t, err)
			defer secret.Destroy()

			require.EqualValues(t, tt.expected, secret.TemporaryString())
		})
	}
}
// TestSecretUnquote checks that secrets are unquoted exactly like regular
// TOML strings: each case declares the secret and an identically quoted
// plain "expected" option, and both must yield the same value for every
// quoting style (single, double, triple, mixed, escaped, and strings
// containing backslashes).
func TestSecretUnquote(t *testing.T) {
	tests := []struct {
		name string
		cfg  []byte
	}{
		{
			name: "single quotes",
			cfg: []byte(`
[[inputs.mockup]]
secret = 'a secret'
expected = 'a secret'
`),
		},
		{
			name: "double quotes",
			cfg: []byte(`
[[inputs.mockup]]
secret = "a secret"
expected = "a secret"
`),
		},
		{
			name: "triple single quotes",
			cfg: []byte(`
[[inputs.mockup]]
secret = '''a secret'''
expected = '''a secret'''
`),
		},
		{
			name: "triple double quotes",
			cfg: []byte(`
[[inputs.mockup]]
secret = """a secret"""
expected = """a secret"""
`),
		},
		{
			name: "escaped double quotes",
			cfg: []byte(`
[[inputs.mockup]]
secret = "\"a secret\""
expected = "\"a secret\""
`),
		},
		{
			name: "mix double-single quotes (single)",
			cfg: []byte(`
[[inputs.mockup]]
secret = "'a secret'"
expected = "'a secret'"
`),
		},
		{
			name: "mix single-double quotes (single)",
			cfg: []byte(`
[[inputs.mockup]]
secret = '"a secret"'
expected = '"a secret"'
`),
		},
		{
			name: "mix double-single quotes (triple-single)",
			cfg: []byte(`
[[inputs.mockup]]
secret = """'a secret'"""
expected = """'a secret'"""
`),
		},
		{
			name: "mix single-double quotes (triple-single)",
			cfg: []byte(`
[[inputs.mockup]]
secret = '''"a secret"'''
expected = '''"a secret"'''
`),
		},
		{
			name: "mix double-single quotes (triple)",
			cfg: []byte(`
[[inputs.mockup]]
secret = """'''a secret'''"""
expected = """'''a secret'''"""
`),
		},
		{
			name: "mix single-double quotes (triple)",
			cfg: []byte(`
[[inputs.mockup]]
secret = '''"""a secret"""'''
expected = '''"""a secret"""'''
`),
		},
		{
			name: "single quotes with backslashes",
			cfg: []byte(`
[[inputs.mockup]]
secret = 'Server=SQLTELEGRAF\\SQL2022;app name=telegraf;log=1;'
expected = 'Server=SQLTELEGRAF\\SQL2022;app name=telegraf;log=1;'
`),
		},
		{
			name: "double quotes with backslashes",
			cfg: []byte(`
[[inputs.mockup]]
secret = "Server=SQLTELEGRAF\\SQL2022;app name=telegraf;log=1;"
expected = "Server=SQLTELEGRAF\\SQL2022;app name=telegraf;log=1;"
`),
		},
		{
			name: "triple single quotes with backslashes",
			cfg: []byte(`
[[inputs.mockup]]
secret = '''Server=SQLTELEGRAF\\SQL2022;app name=telegraf;log=1;'''
expected = '''Server=SQLTELEGRAF\\SQL2022;app name=telegraf;log=1;'''
`),
		},
		{
			name: "triple double quotes with backslashes",
			cfg: []byte(`
[[inputs.mockup]]
secret = """Server=SQLTELEGRAF\\SQL2022;app name=telegraf;log=1;"""
expected = """Server=SQLTELEGRAF\\SQL2022;app name=telegraf;log=1;"""
`),
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			c := NewConfig()
			require.NoError(t, c.LoadConfigData(tt.cfg, EmptySourcePath))
			require.Len(t, c.Inputs, 1)

			// Create a mockup secretstore
			store := &MockupSecretStore{
				Secrets: map[string][]byte{},
			}
			require.NoError(t, store.Init())
			c.SecretStores["mock"] = store
			require.NoError(t, c.LinkSecrets())

			plugin := c.Inputs[0].Input.(*MockupSecretPlugin)
			secret, err := plugin.Secret.Get()
			require.NoError(t, err)
			defer secret.Destroy()

			// The secret must unquote exactly like the plain option.
			require.EqualValues(t, plugin.Expected, secret.TemporaryString())
		})
	}
}
// TestSecretEnvironmentVariable checks that environment variables are
// substituted into secrets during config loading.
func TestSecretEnvironmentVariable(t *testing.T) {
	t.Setenv("SOME_ENV_SECRET", "an env secret")

	c := NewConfig()
	require.NoError(t, c.LoadConfigData([]byte(`
[[inputs.mockup]]
secret = "$SOME_ENV_SECRET"
`), EmptySourcePath))
	require.Len(t, c.Inputs, 1)

	// Create a mockup secretstore
	store := &MockupSecretStore{
		Secrets: map[string][]byte{},
	}
	require.NoError(t, store.Init())
	c.SecretStores["mock"] = store
	require.NoError(t, c.LinkSecrets())

	plugin := c.Inputs[0].Input.(*MockupSecretPlugin)
	secret, err := plugin.Secret.Get()
	require.NoError(t, err)
	defer secret.Destroy()

	require.EqualValues(t, "an env secret", secret.TemporaryString())
}
// TestSecretCount verifies the global secret bookkeeping: loading the
// config increases the counter once per declared secret and destroying the
// secrets brings the counter back to zero.
func TestSecretCount(t *testing.T) {
	secretCount.Store(0)
	cfg := []byte(`
[[inputs.mockup]]
[[inputs.mockup]]
secret = "a secret"
[[inputs.mockup]]
secret = "another secret"
`)

	c := NewConfig()
	require.NoError(t, c.LoadConfigData(cfg, EmptySourcePath))
	require.Len(t, c.Inputs, 3)
	// Only two of the three plugins declare a secret.
	require.Equal(t, int64(2), secretCount.Load())

	// Remove all secrets and check
	for _, ri := range c.Inputs {
		input := ri.Input.(*MockupSecretPlugin)
		input.Secret.Destroy()
	}
	require.Equal(t, int64(0), secretCount.Load())
}
// TestSecretStoreStatic checks that static secret-store references of the
// form "@{mock:key}" are resolved to the values provided by the store.
func TestSecretStoreStatic(t *testing.T) {
	cfg := []byte(
		`
[[inputs.mockup]]
secret = "@{mock:secret1}"
[[inputs.mockup]]
secret = "@{mock:secret2}"
[[inputs.mockup]]
secret = "@{mock:a_strange_secret}"
[[inputs.mockup]]
secret = "@{mock:a_weird_secret}"
`)

	c := NewConfig()
	err := c.LoadConfigData(cfg, EmptySourcePath)
	require.NoError(t, err)
	require.Len(t, c.Inputs, 4)

	// Create a mockup secretstore
	store := &MockupSecretStore{
		Secrets: map[string][]byte{
			"secret1":          []byte("Ood Bnar"),
			"secret2":          []byte("Thon"),
			"a_strange_secret": []byte("Obi-Wan Kenobi"),
			"a_weird_secret":   []byte("Arca Jeth"),
		},
	}
	require.NoError(t, store.Init())
	c.SecretStores["mock"] = store
	require.NoError(t, c.LinkSecrets())

	// Each input must receive the store value of its referenced key.
	expected := []string{"Ood Bnar", "Thon", "Obi-Wan Kenobi", "Arca Jeth"}
	for i, input := range c.Inputs {
		plugin := input.Input.(*MockupSecretPlugin)
		secret, err := plugin.Secret.Get()
		require.NoError(t, err)
		require.EqualValues(t, expected[i], secret.TemporaryString())
		secret.Destroy()
	}
}
// TestSecretStoreInvalidKeys checks that references with keys containing
// characters outside [A-Za-z0-9_] are NOT treated as secret-store
// references: the literal reference text is returned unchanged even when
// the store contains a matching key.
func TestSecretStoreInvalidKeys(t *testing.T) {
	cfg := []byte(
		`
[[inputs.mockup]]
secret = "@{mock:}"
[[inputs.mockup]]
secret = "@{mock:wild?%go}"
[[inputs.mockup]]
secret = "@{mock:a-strange-secret}"
[[inputs.mockup]]
secret = "@{mock:a weird secret}"
`)

	c := NewConfig()
	err := c.LoadConfigData(cfg, EmptySourcePath)
	require.NoError(t, err)
	require.Len(t, c.Inputs, 4)

	// Create a mockup secretstore
	store := &MockupSecretStore{
		Secrets: map[string][]byte{
			"":                 []byte("Ood Bnar"),
			"wild?%go":         []byte("Thon"),
			"a-strange-secret": []byte("Obi-Wan Kenobi"),
			"a weird secret":   []byte("Arca Jeth"),
		},
	}
	require.NoError(t, store.Init())
	c.SecretStores["mock"] = store
	require.NoError(t, c.LinkSecrets())

	// The invalid references must stay verbatim in the secret value.
	expected := []string{
		"@{mock:}",
		"@{mock:wild?%go}",
		"@{mock:a-strange-secret}",
		"@{mock:a weird secret}",
	}
	for i, input := range c.Inputs {
		plugin := input.Input.(*MockupSecretPlugin)
		secret, err := plugin.Secret.Get()
		require.NoError(t, err)
		require.EqualValues(t, expected[i], secret.TemporaryString())
		secret.Destroy()
	}
}
// TestSecretStoreDeclarationMissingID ensures that a secret-store
// declaration lacking an "id" setting is rejected during config parsing.
func TestSecretStoreDeclarationMissingID(t *testing.T) {
	defer func() { unlinkedSecrets = make([]*Secret, 0) }()

	c := NewConfig()
	err := c.LoadConfigData([]byte(`[[secretstores.mockup]]`), EmptySourcePath)
	require.ErrorContains(t, err, `error parsing mockup, "mockup" secret-store without ID`)
}
// TestSecretStoreDeclarationInvalidID ensures that secret-store IDs
// containing characters outside the allowed set are rejected during
// config parsing.
func TestSecretStoreDeclarationInvalidID(t *testing.T) {
	defer func() { unlinkedSecrets = make([]*Secret, 0) }()

	template := `
[[secretstores.mockup]]
id = %q
`
	for _, storeID := range []string{"foo.bar", "dummy-123", "test!", "wohoo+"} {
		t.Run(storeID, func(t *testing.T) {
			c := NewConfig()
			err := c.LoadConfigData([]byte(fmt.Sprintf(template, storeID)), EmptySourcePath)
			require.ErrorContains(t, err, `error parsing mockup, invalid secret-store ID`)
		})
	}
}
// TestSecretStoreDeclarationValidID ensures that well-formed secret-store
// IDs (letters, digits and underscore) are accepted during config parsing.
func TestSecretStoreDeclarationValidID(t *testing.T) {
	defer func() { unlinkedSecrets = make([]*Secret, 0) }()

	template := `
[[secretstores.mockup]]
id = %q
`
	for _, storeID := range []string{"foobar", "dummy123", "test_id", "W0Hoo_lala123"} {
		t.Run(storeID, func(t *testing.T) {
			c := NewConfig()
			require.NoError(t, c.LoadConfigData([]byte(fmt.Sprintf(template, storeID)), EmptySourcePath))
		})
	}
}
// SecretImplTestSuite runs the secret tests below against one of the two
// secret implementations, selected via the protected flag.
type SecretImplTestSuite struct {
	suite.Suite
	// protected selects the memory-protected secret implementation when true
	protected bool
}
// SetupSuite activates the secret implementation (protected or unprotected
// memory) that all tests of this suite run against.
func (tsuite *SecretImplTestSuite) SetupSuite() {
	if !tsuite.protected {
		DisableSecretProtection()
		return
	}
	EnableSecretProtection()
}
// TearDownSuite restores the default (protected) secret implementation.
func (*SecretImplTestSuite) TearDownSuite() {
	EnableSecretProtection()
}

// TearDownTest resets the global list of not-yet-linked secrets so tests
// do not leak state into each other.
func (*SecretImplTestSuite) TearDownTest() {
	unlinkedSecrets = make([]*Secret, 0)
}
// TestSecretEqualTo checks that EqualTo matches the stored secret content
// and rejects any other content.
func (tsuite *SecretImplTestSuite) TestSecretEqualTo() {
	t := tsuite.T()
	const plaintext = "a wonderful test"

	s := NewSecret([]byte(plaintext))
	defer s.Destroy()

	match, err := s.EqualTo([]byte(plaintext))
	require.NoError(t, err)
	require.True(t, match)

	match, err = s.EqualTo([]byte("some random text"))
	require.NoError(t, err)
	require.False(t, match)
}
// TestSecretStoreInvalidReference checks that referencing a secret-store ID
// that was never registered fails at link time and leaves the secret with
// unresolved (unlinked) parts.
func (tsuite *SecretImplTestSuite) TestSecretStoreInvalidReference() {
	t := tsuite.T()
	cfg := []byte(
		`
[[inputs.mockup]]
secret = "@{mock:test}"
`)
	c := NewConfig()
	require.NoError(t, c.LoadConfigData(cfg, EmptySourcePath))
	require.Len(t, c.Inputs, 1)

	// Create a mockup secretstore; note it is registered under "foo"
	// while the configuration references "mock"
	store := &MockupSecretStore{
		Secrets: map[string][]byte{"test": []byte("Arca Jeth")},
	}
	require.NoError(t, store.Init())
	c.SecretStores["foo"] = store
	err := c.LinkSecrets()
	require.EqualError(t, err, `unknown secret-store for "@{mock:test}"`)

	// Resolving the secret must fail due to the unlinked reference
	for _, input := range c.Inputs {
		plugin := input.Input.(*MockupSecretPlugin)
		secret, err := plugin.Secret.Get()
		require.EqualError(t, err, `unlinked parts in secret: @{mock:test}`)
		require.Empty(t, secret)
	}
}
// TestSecretStoreStaticChanging checks that secrets backed by a non-dynamic
// store are resolved once at link time: later changes of the value inside
// the store must not be reflected by the linked secret.
func (tsuite *SecretImplTestSuite) TestSecretStoreStaticChanging() {
	t := tsuite.T()
	cfg := []byte(
		`
[[inputs.mockup]]
secret = "@{mock:secret}"
`)
	c := NewConfig()
	err := c.LoadConfigData(cfg, EmptySourcePath)
	require.NoError(t, err)
	require.Len(t, c.Inputs, 1)

	// Create a mockup secretstore
	store := &MockupSecretStore{
		Secrets: map[string][]byte{"secret": []byte("Ood Bnar")},
		Dynamic: false,
	}
	require.NoError(t, store.Init())
	c.SecretStores["mock"] = store
	require.NoError(t, c.LinkSecrets())

	sequence := []string{"Ood Bnar", "Thon", "Obi-Wan Kenobi", "Arca Jeth"}
	plugin := c.Inputs[0].Input.(*MockupSecretPlugin)
	secret, err := plugin.Secret.Get()
	require.NoError(t, err)
	defer secret.Destroy()

	require.EqualValues(t, "Ood Bnar", secret.TemporaryString())

	for _, v := range sequence {
		store.Secrets["secret"] = []byte(v)
		secret, err := plugin.Secret.Get()
		require.NoError(t, err)

		// The secret should not change as the store is marked non-dynamic!
		require.EqualValues(t, "Ood Bnar", secret.TemporaryString())
		secret.Destroy()
	}
}
// TestSecretStoreDynamic checks that secrets backed by a dynamic store are
// re-resolved on every access and therefore follow value changes in the
// backing store.
func (tsuite *SecretImplTestSuite) TestSecretStoreDynamic() {
	t := tsuite.T()
	cfg := []byte(
		`
[[inputs.mockup]]
secret = "@{mock:secret}"
`)
	c := NewConfig()
	err := c.LoadConfigData(cfg, EmptySourcePath)
	require.NoError(t, err)
	require.Len(t, c.Inputs, 1)

	// Create a mockup secretstore
	store := &MockupSecretStore{
		Secrets: map[string][]byte{"secret": []byte("Ood Bnar")},
		Dynamic: true,
	}
	require.NoError(t, store.Init())
	c.SecretStores["mock"] = store
	require.NoError(t, c.LinkSecrets())

	sequence := []string{"Ood Bnar", "Thon", "Obi-Wan Kenobi", "Arca Jeth"}
	plugin := c.Inputs[0].Input.(*MockupSecretPlugin)
	for _, v := range sequence {
		store.Secrets["secret"] = []byte(v)
		secret, err := plugin.Secret.Get()
		require.NoError(t, err)

		// The secret should follow the store value as the store is dynamic!
		require.EqualValues(t, v, secret.TemporaryString())
		secret.Destroy()
	}
}
// TestSecretSet checks that Set replaces the stored secret value and the
// new value is returned by subsequent Get calls.
func (tsuite *SecretImplTestSuite) TestSecretSet() {
	t := tsuite.T()
	cfg := []byte(`
[[inputs.mockup]]
secret = "a secret"
`)
	c := NewConfig()
	require.NoError(t, c.LoadConfigData(cfg, EmptySourcePath))
	require.Len(t, c.Inputs, 1)
	require.NoError(t, c.LinkSecrets())

	plugin := c.Inputs[0].Input.(*MockupSecretPlugin)

	before, err := plugin.Secret.Get()
	require.NoError(t, err)
	defer before.Destroy()
	require.EqualValues(t, "a secret", before.TemporaryString())

	require.NoError(t, plugin.Secret.Set([]byte("another secret")))

	after, err := plugin.Secret.Get()
	require.NoError(t, err)
	defer after.Destroy()
	require.EqualValues(t, "another secret", after.TemporaryString())
}
// TestSecretSetResolve checks that Set re-links the new value, i.e. secret
// references contained in the new value are resolved against the stores.
func (tsuite *SecretImplTestSuite) TestSecretSetResolve() {
	t := tsuite.T()
	cfg := []byte(`
[[inputs.mockup]]
secret = "@{mock:secret}"
`)
	c := NewConfig()
	require.NoError(t, c.LoadConfigData(cfg, EmptySourcePath))
	require.Len(t, c.Inputs, 1)

	// Create a mockup secretstore
	store := &MockupSecretStore{
		Secrets: map[string][]byte{"secret": []byte("Ood Bnar")},
		Dynamic: true,
	}
	require.NoError(t, store.Init())
	c.SecretStores["mock"] = store
	require.NoError(t, c.LinkSecrets())

	plugin := c.Inputs[0].Input.(*MockupSecretPlugin)
	secret, err := plugin.Secret.Get()
	require.NoError(t, err)
	defer secret.Destroy()

	require.EqualValues(t, "Ood Bnar", secret.TemporaryString())

	// The reference inside the new value must be resolved by Set
	require.NoError(t, plugin.Secret.Set([]byte("@{mock:secret} is cool")))
	newsecret, err := plugin.Secret.Get()
	require.NoError(t, err)
	defer newsecret.Destroy()

	require.EqualValues(t, "Ood Bnar is cool", newsecret.TemporaryString())
}
// TestSecretSetResolveInvalid checks that Set rejects a new value that
// references a key which cannot be linked against the registered stores.
func (tsuite *SecretImplTestSuite) TestSecretSetResolveInvalid() {
	t := tsuite.T()
	cfg := []byte(`
[[inputs.mockup]]
secret = "@{mock:secret}"
`)
	c := NewConfig()
	require.NoError(t, c.LoadConfigData(cfg, EmptySourcePath))
	require.Len(t, c.Inputs, 1)

	// Create a mockup secretstore
	store := &MockupSecretStore{
		Secrets: map[string][]byte{"secret": []byte("Ood Bnar")},
		Dynamic: true,
	}
	require.NoError(t, store.Init())
	c.SecretStores["mock"] = store
	require.NoError(t, c.LinkSecrets())

	plugin := c.Inputs[0].Input.(*MockupSecretPlugin)
	secret, err := plugin.Secret.Get()
	require.NoError(t, err)
	defer secret.Destroy()

	require.EqualValues(t, "Ood Bnar", secret.TemporaryString())

	// Setting a reference to a key unknown to the store must fail linking
	err = plugin.Secret.Set([]byte("@{mock:another_secret}"))
	require.ErrorContains(t, err, `linking new secrets failed: unlinked part "@{mock:another_secret}"`)
}
// TestSecretInvalidWarn checks that loading a configuration warns about
// secret references whose key contains invalid characters, while valid
// references are not leaked into the log output.
func (tsuite *SecretImplTestSuite) TestSecretInvalidWarn() {
	t := tsuite.T()

	// Intercept the log output
	var buf bytes.Buffer
	backup := log.Writer()
	log.SetOutput(&buf)
	defer log.SetOutput(backup)

	cfg := []byte(`
[[inputs.mockup]]
secret = "server=a user=@{mock:secret-with-invalid-chars} pass=@{mock:secret_pass}"
`)
	c := NewConfig()
	require.NoError(t, c.LoadConfigData(cfg, EmptySourcePath))
	require.Len(t, c.Inputs, 1)

	// Only the reference with invalid characters may be warned about
	require.Contains(t, buf.String(), `W! Secret "@{mock:secret-with-invalid-chars}" contains invalid character(s)`)
	require.NotContains(t, buf.String(), "@{mock:secret_pass}")
}
// TestSecretImplUnprotected exercises the unprotected secret implementation
// directly: the container stores a plain copy of the secret and all buffer
// accessors return the original content.
func TestSecretImplUnprotected(t *testing.T) {
	impl := &unprotectedSecretImpl{}
	container := impl.Container([]byte("foobar"))
	require.NotNil(t, container)
	c, ok := container.(*unprotectedSecretContainer)
	require.True(t, ok)
	require.Equal(t, "foobar", string(c.buf.content))

	buf, err := container.Buffer()
	require.NoError(t, err)
	require.NotNil(t, buf)
	require.Equal(t, []byte("foobar"), buf.Bytes())
	require.Equal(t, "foobar", buf.TemporaryString())
	require.Equal(t, "foobar", buf.String())
}
// TestSecretImplTestSuiteUnprotected runs the suite against the
// unprotected (plain memory) secret implementation.
func TestSecretImplTestSuiteUnprotected(t *testing.T) {
	suite.Run(t, &SecretImplTestSuite{protected: false})
}

// TestSecretImplTestSuiteProtected runs the suite against the
// memory-protected secret implementation.
func TestSecretImplTestSuiteProtected(t *testing.T) {
	suite.Run(t, &SecretImplTestSuite{protected: true})
}
// Mockup (input) plugin for testing to avoid cyclic dependencies
type MockupSecretPlugin struct {
	Secret   Secret `toml:"secret"`
	Expected string `toml:"expected"`
}

// SampleConfig implements telegraf.Input.
func (*MockupSecretPlugin) SampleConfig() string { return "Mockup test secret plugin" }

// Gather implements telegraf.Input; the mockup collects nothing.
func (*MockupSecretPlugin) Gather(_ telegraf.Accumulator) error { return nil }
// MockupSecretStore is a minimal in-memory secret-store used by the tests
// in this file to avoid cyclic dependencies on real secret-store plugins.
type MockupSecretStore struct {
	Secrets map[string][]byte // secret values by key
	Dynamic bool              // reported to resolvers; true means values may change
}

// Init needs no setup for the mockup store.
func (*MockupSecretStore) Init() error {
	return nil
}

// SampleConfig returns the mockup sample configuration.
func (*MockupSecretStore) SampleConfig() string {
	return "Mockup test secret plugin"
}

// Get returns the stored value for key or an error if the key is unknown.
func (s *MockupSecretStore) Get(key string) ([]byte, error) {
	if v, found := s.Secrets[key]; found {
		return v, nil
	}
	return nil, errors.New("not found")
}

// Set stores value under key.
func (s *MockupSecretStore) Set(key, value string) error {
	s.Secrets[key] = []byte(value)
	return nil
}

// List returns all known secret keys in no particular order.
func (s *MockupSecretStore) List() ([]string, error) {
	keys := make([]string, 0, len(s.Secrets))
	for key := range s.Secrets {
		keys = append(keys, key)
	}
	return keys, nil
}
// GetResolver returns a resolver function for the given key. The resolver
// reports the store's Dynamic flag so callers know whether the returned
// value may change between calls.
func (s *MockupSecretStore) GetResolver(key string) (telegraf.ResolveFunc, error) {
	return func() ([]byte, bool, error) {
		v, err := s.Get(key)
		return v, s.Dynamic, err
	}, nil
}
// Register the mockup plugin on loading
func init() {
	// Register the mockup input plugin for the required names
	inputs.Add("mockup", func() telegraf.Input { return &MockupSecretPlugin{} })
	// Register the mockup secret-store plugin
	secretstores.Add("mockup", func(string) telegraf.SecretStore {
		return &MockupSecretStore{}
	})
}

View file

@ -0,0 +1,94 @@
package config
import (
"bytes"
"unsafe"
)
// unprotectedSecretImpl implements secret handling in plain process
// memory, i.e. without any additional memory protection.
type unprotectedSecretImpl struct{}

// Container wraps the given secret into an unprotected container,
// copying the bytes into a fresh buffer.
func (*unprotectedSecretImpl) Container(secret []byte) secretContainer {
	return &unprotectedSecretContainer{buf: newUnlockedBuffer(secret)}
}

// EmptyBuffer returns a new, zero-length secret buffer.
func (*unprotectedSecretImpl) EmptyBuffer() SecretBuffer {
	return &unlockedBuffer{}
}

// Wipe overwrites the given bytes with zeros.
func (*unprotectedSecretImpl) Wipe(secret []byte) {
	for i := 0; i < len(secret); i++ {
		secret[i] = 0
	}
}
// unlockedBuffer holds secret bytes in ordinary (unprotected) heap memory.
type unlockedBuffer struct {
	content []byte
}

// newUnlockedBuffer creates a buffer holding an independent copy of secret.
func newUnlockedBuffer(secret []byte) *unlockedBuffer {
	return &unlockedBuffer{bytes.Clone(secret)}
}

// Size returns the number of bytes currently stored in the buffer.
func (lb *unlockedBuffer) Size() int {
	return len(lb.content)
}

func (*unlockedBuffer) Grow(int) {
	// The underlying byte-buffer will grow dynamically
}

// Bytes returns the content without copying; the returned slice aliases
// the internal storage and must not be used after Destroy.
func (lb *unlockedBuffer) Bytes() []byte {
	return lb.content
}

// TemporaryString returns the content as a string without copying. The
// string aliases the internal storage and is only valid until the buffer
// is modified or destroyed.
func (lb *unlockedBuffer) TemporaryString() string {
	//nolint:gosec // G103: Valid use of unsafe call to cast underlying bytes to string
	// Use unsafe.SliceData instead of &lb.content[0] so that an empty
	// buffer yields "" instead of panicking with an index-out-of-range.
	return unsafe.String(unsafe.SliceData(lb.content), len(lb.content))
}

// String returns an independent copy of the content as a string.
func (lb *unlockedBuffer) String() string {
	return string(lb.content)
}
// Destroy wipes the secret bytes in place via the active secret
// implementation and releases the buffer.
func (lb *unlockedBuffer) Destroy() {
	selectedImpl.Wipe(lb.content)
	lb.content = nil
}
// unprotectedSecretContainer stores a secret in plain, unprotected memory.
type unprotectedSecretContainer struct {
	buf *unlockedBuffer
}

// Destroy wipes the secret from memory and drops the buffer.
// It is safe to call on an already destroyed container.
func (c *unprotectedSecretContainer) Destroy() {
	if c.buf != nil {
		// Wipe the secret from memory
		c.buf.Destroy()
		c.buf = nil
	}
}

// Equals reports whether the stored secret matches ref. A destroyed
// container never matches.
func (c *unprotectedSecretContainer) Equals(ref []byte) (bool, error) {
	if c.buf == nil {
		return false, nil
	}
	return bytes.Equal(c.buf.content, ref), nil
}

// Buffer returns a copy of the stored secret; a destroyed container
// yields an empty buffer.
func (c *unprotectedSecretContainer) Buffer() (SecretBuffer, error) {
	if c.buf == nil {
		return &unlockedBuffer{}, nil
	}
	return newUnlockedBuffer(c.buf.content), nil
}

// AsBuffer wraps the given bytes into a buffer without copying them.
func (*unprotectedSecretContainer) AsBuffer(secret []byte) SecretBuffer {
	return &unlockedBuffer{secret}
}

// Replace substitutes the stored secret by a copy of the given bytes.
func (c *unprotectedSecretContainer) Replace(secret []byte) {
	c.buf = newUnlockedBuffer(secret)
}

28
config/testdata/addressbook.proto vendored Normal file
View file

@ -0,0 +1,28 @@
syntax = "proto3";
package addressbook;
message Person {
string name = 1;
int32 id = 2; // Unique ID number for this person.
string email = 3;
uint32 age = 4;
enum PhoneType {
MOBILE = 0;
HOME = 1;
WORK = 2;
}
message PhoneNumber {
string number = 1;
PhoneType type = 2;
}
repeated PhoneNumber phones = 5;
}
message AddressBook {
repeated Person people = 1;
repeated string tags = 2;
}

4
config/testdata/azure_monitor.toml vendored Normal file
View file

@ -0,0 +1,4 @@
[[outputs.azure_monitor]]
[[outputs.azure_monitor]]
namespace_prefix = ""

2
config/testdata/default_parser.toml vendored Normal file
View file

@ -0,0 +1,2 @@
[[inputs.file]]
files = ["metrics"]

View file

@ -0,0 +1,2 @@
[[inputs.exec]]
command = '/usr/bin/echo {"value": 42}'

View file

@ -0,0 +1,8 @@
[[inputs.file]]
pass = ["foo"]
fieldpass = ["bar"]
fieldinclude = ["baz"]
drop = ["foo"]
fielddrop = ["bar"]
fieldexclude = ["baz"]

99
config/testdata/envvar_comments.toml vendored Normal file
View file

@ -0,0 +1,99 @@
# Telegraf Configuration
#
# Telegraf is entirely plugin driven. All metrics are gathered from the
# declared inputs, and sent to the declared outputs.
#
# Plugins must be declared in here to be active.
# To deactivate a plugin, comment out the name and any variables.
#
# Use 'telegraf -config telegraf.conf -test' to see what metrics a config
# file would generate.
#
# Environment variables can be used anywhere in this config file, simply surround
# them with ${}. For strings the variable must be within quotes (ie, "${STR_VAR}"),
# for numbers and booleans they should be plain (ie, ${INT_VAR}, ${BOOL_VAR})
[global_tags]
[agent]
interval = "10s"
round_interval = true
metric_batch_size = 1000
metric_buffer_limit = 10000
collection_jitter = "0s"
flush_interval = '10s'
flush_jitter = "0s"
precision = ""
hostname = ''
omit_hostname = false
[[outputs.influxdb]]
setting1 = '#'#test
setting2 = '''#'''#test
setting3 = "#"#test
setting4 = """#"""#test
wicked1 = "\""#test
wicked2 = """\""""#test
[[inputs.cpu]]
percpu = true
#totalcpu = true
# collect_cpu_time = false
## report_active = false
[[a.plugin]]
mylist = [
"value 1", # a good value
"value 2", # a better value
"value 3", "value 4",
'value5', """tagwith#value""",
] # Should work
[[some.stuff]]
a = 'not a #comment'
b = '''not a #comment'''
c = "not a #comment"
d = """not a #comment"""
e = '''not a #comment containing "quotes"'''
f = '''not a #comment containing 'quotes'?'''
g = """not a #comment containing "quotes"?"""
# Issue #14237
[[inputs.myplugin]]
value = '''This isn't a #comment.'''
[[processors.starlark]]
script = """
# Drop fields if they contain a string.
#
# Example Input:
# measurement,host=hostname a=1,b="somestring" 1597255410000000000
#
# Example Output:
# measurement,host=hostname a=1 1597255410000000000
def apply(metric):
for k, v in metric.fields.items():
if type(v) == "string":
metric.fields.pop(k)
return metric
"""
[[processors.starlark]]
script = '''
# Drop fields if they contain a string.
#
# Example Input:
# measurement,host=hostname a=1,b="somestring" 1597255410000000000
#
# Example Output:
# measurement,host=hostname a=1 1597255410000000000
def apply(metric):
for k, v in metric.fields.items():
if type(v) == "string":
metric.fields.pop(k)
return metric
'''

View file

@ -0,0 +1,99 @@
[global_tags]
[agent]
interval = "10s"
round_interval = true
metric_batch_size = 1000
metric_buffer_limit = 10000
collection_jitter = "0s"
flush_interval = '10s'
flush_jitter = "0s"
precision = ""
hostname = ''
omit_hostname = false
[[outputs.influxdb]]
setting1 = '#'
setting2 = '''#'''
setting3 = "#"
setting4 = """#"""
wicked1 = "\""
wicked2 = """\""""
[[inputs.cpu]]
percpu = true
[[a.plugin]]
mylist = [
"value 1",
"value 2",
"value 3", "value 4",
'value5', """tagwith#value""",
]
[[some.stuff]]
a = 'not a #comment'
b = '''not a #comment'''
c = "not a #comment"
d = """not a #comment"""
e = '''not a #comment containing "quotes"'''
f = '''not a #comment containing 'quotes'?'''
g = """not a #comment containing "quotes"?"""
[[inputs.myplugin]]
value = '''This isn't a #comment.'''
[[processors.starlark]]
script = """
# Drop fields if they contain a string.
#
# Example Input:
# measurement,host=hostname a=1,b="somestring" 1597255410000000000
#
# Example Output:
# measurement,host=hostname a=1 1597255410000000000
def apply(metric):
for k, v in metric.fields.items():
if type(v) == "string":
metric.fields.pop(k)
return metric
"""
[[processors.starlark]]
script = '''
# Drop fields if they contain a string.
#
# Example Input:
# measurement,host=hostname a=1,b="somestring" 1597255410000000000
#
# Example Output:
# measurement,host=hostname a=1 1597255410000000000
def apply(metric):
for k, v in metric.fields.items():
if type(v) == "string":
metric.fields.pop(k)
return metric
'''

View file

@ -0,0 +1,2 @@
[[processors.processor]]
metricpass = '("state" in tags && tags.state == "on") || time > timestamp("2023-04-24T00:00:00Z")'

7
config/testdata/inline_table.toml vendored Normal file
View file

@ -0,0 +1,7 @@
[[outputs.http]]
headers = { Authorization = "Token $TOKEN",Content-Type = "application/json" }
taginclude = ["org_id"]
[[outputs.http]]
headers = { Authorization = "Token $TOKEN",Content-Type = "application/json" }
taginclude = ["org_id"]

2
config/testdata/invalid_field.toml vendored Normal file
View file

@ -0,0 +1,2 @@
[[inputs.http_listener_v2]]
not_a_field = true

View file

@ -0,0 +1,5 @@
[[inputs.parser]]
data_format = "xpath_json"
[[inputs.parser.xpath]]
not_a_field = true

View file

@ -0,0 +1,5 @@
[[inputs.parser_func]]
data_format = "xpath_json"
[[inputs.parser_func.xpath]]
not_a_field = true

View file

@ -0,0 +1,2 @@
[[processors.processor]]
not_a_field = true

View file

@ -0,0 +1,3 @@
[[processors.processor_parser]]
not_a_field = true
data_format = "influx"

View file

@ -0,0 +1,5 @@
[[processors.processor_parser]]
data_format = "xpath_json"
[[processors.processor_parser.xpath]]
not_a_field = true

View file

@ -0,0 +1,3 @@
[[processors.processor_parserfunc]]
not_a_field = true
data_format = "influx"

View file

@ -0,0 +1,5 @@
[[inputs.parser_func]]
data_format = "xpath_json"
[[inputs.parser_func.xpath]]
not_a_field = true

View file

@ -0,0 +1,3 @@
[[processors.processor_parser]]
not_a_field = true
data_format = "influx"

View file

@ -0,0 +1,3 @@
[[processors.processor_parserfunc]]
not_a_field = true
data_format = "influx"

View file

@ -0,0 +1,3 @@
[[inputs.parser]]
not_a_field = true
data_format = "influx"

View file

@ -0,0 +1,3 @@
[[inputs.parser_func]]
not_a_field = true
data_format = "influx"

4
config/testdata/non_slice_slice.toml vendored Normal file
View file

@ -0,0 +1,4 @@
[[outputs.http]]
[outputs.http.headers]
Content-Type = "application/json"
taginclude = ["org_id"]

65
config/testdata/parsers_new.toml vendored Normal file
View file

@ -0,0 +1,65 @@
[[inputs.parser_test_new]]
data_format = "collectd"
[[inputs.parser_test_new]]
data_format = "csv"
csv_header_row_count = 42
[[inputs.parser_test_new]]
data_format = "dropwizard"
[[inputs.parser_test_new]]
data_format = "form_urlencoded"
[[inputs.parser_test_new]]
data_format = "graphite"
[[inputs.parser_test_new]]
data_format = "grok"
grok_patterns = ["%{COMBINED_LOG_FORMAT}"]
[[inputs.parser_test_new]]
data_format = "influx"
[[inputs.parser_test_new]]
data_format = "json"
[[inputs.parser_test_new]]
data_format = "json_v2"
[[inputs.parser_test_new.json_v2]]
[[inputs.parser_test_new.json_v2.field]]
path = ""
rename = ""
type = "int"
[[inputs.parser_test_new]]
data_format = "logfmt"
[[inputs.parser_test_new]]
data_format = "nagios"
[[inputs.parser_test_new]]
data_format = "prometheus"
[[inputs.parser_test_new]]
data_format = "prometheusremotewrite"
[[inputs.parser_test_new]]
data_format = "value"
[[inputs.parser_test_new]]
data_format = "wavefront"
[[inputs.parser_test_new]]
data_format = "xml"
[[inputs.parser_test_new]]
data_format = "xpath_json"
[[inputs.parser_test_new]]
data_format = "xpath_msgpack"
[[inputs.parser_test_new]]
data_format = "xpath_protobuf"
xpath_protobuf_file = "testdata/addressbook.proto"
xpath_protobuf_type = "addressbook.AddressBook"

View file

@ -0,0 +1,7 @@
[[processors.processor]]
[[processors.parser_test]]
[[processors.processor_parser]]
[[processors.processor_parserfunc]]

View file

@ -0,0 +1,16 @@
[[processors.parser_test]]
[[processors.processor_parser]]
order = 2
[[processors.processor_parserfunc]]
[[processors.processor]]
order = 1
[[processors.processor_parser]]
order = 3
[[processors.processor_parserfunc]]
order = 3

View file

@ -0,0 +1,9 @@
[[processors.parser_test]]
[[processors.processor_parser]]
[[processors.processor_parserfunc]]
[[processors.processor]]
order = 1

View file

@ -0,0 +1,65 @@
[[processors.parser_test]]
data_format = "collectd"
[[processors.parser_test]]
data_format = "csv"
csv_header_row_count = 42
[[processors.parser_test]]
data_format = "dropwizard"
[[processors.parser_test]]
data_format = "form_urlencoded"
[[processors.parser_test]]
data_format = "graphite"
[[processors.parser_test]]
data_format = "grok"
grok_patterns = ["%{COMBINED_LOG_FORMAT}"]
[[processors.parser_test]]
data_format = "influx"
[[processors.parser_test]]
data_format = "json"
[[processors.parser_test]]
data_format = "json_v2"
[[processors.parser_test.json_v2]]
[[processors.parser_test.json_v2.field]]
path = ""
rename = ""
type = "int"
[[processors.parser_test]]
data_format = "logfmt"
[[processors.parser_test]]
data_format = "nagios"
[[processors.parser_test]]
data_format = "prometheus"
[[processors.parser_test]]
data_format = "prometheusremotewrite"
[[processors.parser_test]]
data_format = "value"
[[processors.parser_test]]
data_format = "wavefront"
[[processors.parser_test]]
data_format = "xml"
[[processors.parser_test]]
data_format = "xpath_json"
[[processors.parser_test]]
data_format = "xpath_msgpack"
[[processors.parser_test]]
data_format = "xpath_protobuf"
xpath_protobuf_file = "testdata/addressbook.proto"
xpath_protobuf_type = "addressbook.AddressBook"

32
config/testdata/serializers_new.toml vendored Normal file
View file

@ -0,0 +1,32 @@
[[outputs.serializer_test_new]]
data_format = "carbon2"
[[outputs.serializer_test_new]]
data_format = "csv"
[[outputs.serializer_test_new]]
data_format = "graphite"
[[outputs.serializer_test_new]]
data_format = "influx"
[[outputs.serializer_test_new]]
data_format = "json"
[[outputs.serializer_test_new]]
data_format = "msgpack"
[[outputs.serializer_test_new]]
data_format = "nowmetric"
[[outputs.serializer_test_new]]
data_format = "prometheus"
[[outputs.serializer_test_new]]
data_format = "prometheusremotewrite"
[[outputs.serializer_test_new]]
data_format = "splunkmetric"
[[outputs.serializer_test_new]]
data_format = "wavefront"

32
config/testdata/serializers_old.toml vendored Normal file
View file

@ -0,0 +1,32 @@
[[outputs.serializer_test_old]]
data_format = "carbon2"
[[outputs.serializer_test_old]]
data_format = "csv"
[[outputs.serializer_test_old]]
data_format = "graphite"
[[outputs.serializer_test_old]]
data_format = "influx"
[[outputs.serializer_test_old]]
data_format = "json"
[[outputs.serializer_test_old]]
data_format = "msgpack"
[[outputs.serializer_test_old]]
data_format = "nowmetric"
[[outputs.serializer_test_old]]
data_format = "prometheus"
[[outputs.serializer_test_old]]
data_format = "prometheusremotewrite"
[[outputs.serializer_test_old]]
data_format = "splunkmetric"
[[outputs.serializer_test_old]]
data_format = "wavefront"

11
config/testdata/single_plugin.toml vendored Normal file
View file

@ -0,0 +1,11 @@
[[inputs.memcached]]
servers = ["localhost"]
namepass = ["metricname1"]
namedrop = ["metricname2"]
fieldinclude = ["some", "strings"]
fieldexclude = ["other", "stuff"]
interval = "5s"
[inputs.memcached.tagpass]
goodtag = ["mytag"]
[inputs.memcached.tagdrop]
badtag = ["othertag"]

View file

@ -0,0 +1,35 @@
# Telegraf Configuration
#
# Telegraf is entirely plugin driven. All metrics are gathered from the
# declared inputs, and sent to the declared outputs.
#
# Plugins must be declared in here to be active.
# To deactivate a plugin, comment out the name and any variables.
#
# Use 'telegraf -config telegraf.conf -test' to see what metrics a config
# file would generate.
#
# Environment variables can be used anywhere in this config file, simply surround
# them with ${}. For strings the variable must be within quotes (ie, "${STR_VAR}"),
# for numbers and booleans they should be plain (ie, ${INT_VAR}, ${BOOL_VAR})
[[inputs.memcached]]
# this comment line will be ignored by the parser
servers = ["$MY_TEST_SERVER"]
namepass = ["metricname1", "ip_${MY_TEST_SERVER}_name"] # this comment will be ignored as well
namedrop = ["metricname2"]
fieldinclude = ["some", "strings"]
fieldexclude = ["other", "stuff"]
interval = "$TEST_INTERVAL"
##### this input is provided to test multiline strings
command = """
Raw command which may or may not contain # in it
# is unique""" # Multiline comment black starting with #
[inputs.memcached.tagpass]
goodtag = ["mytag", """tagwith#value""",
# comment in between array items
# should ignore "quotes" in comments
'''TagWithMultilineSyntax''', ## ignore this comment
] # hastag
[inputs.memcached.tagdrop]
badtag = ["othertag"]

View file

@ -0,0 +1,5 @@
[[inputs.memcached]]
servers = [
# A comment in the array
"localhost"
]

View file

@ -0,0 +1,13 @@
[[inputs.memcached]]
servers = ["localhost"]
namepass = ["metricname1"]
namepass_separator = "."
namedrop = ["metricname2"]
namedrop_separator = "."
fieldinclude = ["some", "strings"]
fieldexclude = ["other", "stuff"]
interval = "5s"
[inputs.memcached.tagpass]
goodtag = ["mytag"]
[inputs.memcached.tagdrop]
badtag = ["othertag"]

5
config/testdata/slice_comment.toml vendored Normal file
View file

@ -0,0 +1,5 @@
[[outputs.http]]
scopes = [
# comment
"test" # comment
]

5
config/testdata/special_types.key vendored Normal file
View file

@ -0,0 +1,5 @@
-----BEGIN EC PRIVATE KEY-----
MHcCAQEEIFYI4Hm+jRW3OC3zvoWDaCig6E7X0Ql9l8elHPU3e5+toAoGCCqGSM49
AwEHoUQDQgAEGOw1XQ84Ai3GTZJ5o5u1yTFgA3VLZTTT0oHol06LRj5Md3oRy0MQ
QO5OhsAGGz16SYcPHf77aZmf2Of6ixYaLQ==
-----END EC PRIVATE KEY-----

11
config/testdata/special_types.pem vendored Normal file
View file

@ -0,0 +1,11 @@
-----BEGIN CERTIFICATE-----
MIIBjTCCATOgAwIBAgIRALJ1hlgDYCh5dWfr6tdrBEYwCgYIKoZIzj0EAwIwFDES
MBAGA1UEAxMJbG9jYWxob3N0MB4XDTIyMDExMjA3NTgyMloXDTIyMDExMzA3NTgy
MlowFDESMBAGA1UEAxMJbG9jYWxob3N0MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcD
QgAEGOw1XQ84Ai3GTZJ5o5u1yTFgA3VLZTTT0oHol06LRj5Md3oRy0MQQO5OhsAG
Gz16SYcPHf77aZmf2Of6ixYaLaNmMGQwDgYDVR0PAQH/BAQDAgeAMB0GA1UdJQQW
MBQGCCsGAQUFBwMBBggrBgEFBQcDAjAdBgNVHQ4EFgQUuKpGXAb1DaVSffJ/xuF6
FE31CC8wFAYDVR0RBA0wC4IJbG9jYWxob3N0MAoGCCqGSM49BAMCA0gAMEUCIHCb
m2phe189gftRke2Mo45lDsEAGaXsjA4lO/IOMo5lAiEA5k2X0bQfFhSfAcZPFtDI
iUwvC9SD3+CnzkP35O0jo+c=
-----END CERTIFICATE-----

8
config/testdata/special_types.toml vendored Normal file
View file

@ -0,0 +1,8 @@
[[inputs.http_listener_v2]]
write_timeout = "1s"
max_body_size = "1MiB"
paths = [ """
/path/
""" ]
tls_cert = """./testdata/special_types.pem"""
tls_key = '''./testdata/special_types.key'''

View file

@ -0,0 +1,42 @@
[[inputs.statetest]]
[[inputs.statetest]]
servers = ["myserver.com", "myserver.org"]
[[inputs.statetest]]
servers = ["myserver.org", "myserver.com"]
[[inputs.statetest]]
servers = ["myserver.org", "myserver.com"]
port = 0
[[inputs.statetest]]
servers = ["myserver.org", "myserver.com"]
port = 80
method = "strange"
[inputs.statetest.params]
a = "foo"
b = "bar"
[[inputs.statetest]]
servers = ["myserver.org", "myserver.com"]
port = 80
method = "strange"
setup = [
{name="alpha", factor=3.1415, enabled=true, bits=[1,2,3]}
]
[inputs.statetest.params]
a = "foo"
b = "bar"
[[inputs.statetest]]
servers = ["myserver.org", "myserver.com"]
port = 80
method = "strange"
setup = [
{name="alpha", factor=3.1415, enabled=true, bits=[1,2,3]},
{name="beta", factor=2.71828, enabled=true, bits=[1,2,3]}
]
[inputs.statetest.params]
a = "foo"
b = "bar"

View file

@ -0,0 +1,60 @@
[[inputs.statetest]]
servers = ["myserver.org", "myserver.com"]
port = 80
method = "strange"
setup = [
{name="alpha", factor=3.1415, enabled=true, bits=[1,2,3]},
{name="beta", factor=2.71828, enabled=true, bits=[1,2,3]}
]
[inputs.statetest.params]
a = "foo"
b = "bar"
[[inputs.statetest]]
## What a wonderful world...
servers = ["myserver.org", "myserver.com"]
port = 80
method = "strange"
setup = [
{name="alpha", factor=3.1415, enabled=true, bits=[1,2,3]},
{name="beta", factor=2.71828, enabled=true, bits=[1,2,3]}
]
[inputs.statetest.params]
a = "foo"
b = "bar"
[[inputs.statetest]]
servers = ["myserver.org", "myserver.com"]
method = "strange"
setup = [
{name="alpha", factor=3.1415, enabled=true, bits=[1,2,3]},
{name="beta", factor=2.71828, enabled=true, bits=[1,2,3]}
]
port = 80
[inputs.statetest.params]
a = "foo"
b = "bar"
[[inputs.statetest]]
servers = ["myserver.org", "myserver.com"]
port = 80
method = "strange"
setup = [
{name="alpha", factor=3.1415, enabled=true, bits=[1,2,3]},
{name="beta", factor=2.71828, enabled=true, bits=[1,2,3]}
]
[inputs.statetest.params]
b = "bar"
a = "foo"
[[inputs.statetest]]
method = "strange"
servers = ["myserver.org", "myserver.com"]
port = 80
setup = [
{name="alpha", factor=3.1415, enabled=true, bits=[1,2,3]},
{name="beta", factor=2.71828, enabled=true, bits=[1,2,3]}
]
[inputs.statetest.params]
a = "foo"
b = "bar"

View file

@ -0,0 +1,17 @@
[[inputs.statetest]]
servers = ["myserverA.org"]
port = 42
method = "strange"
[[inputs.statetest]]
servers = ["myserverB.org"]
port = 23
method = "strange"
[[inputs.statetest]]
servers = ["myserverC.org"]
port = 80
method = "strange"
[inputs.statetest.params]
a = "foo"
b = "bar"

View file

@ -0,0 +1,8 @@
[[processors.statetest]]
option = "foo"
[[processors.statetest]]
option = "bar"
[[processors.statetest]]
option = "captain obvious"

4
config/testdata/subconfig/exec.conf vendored Normal file
View file

@ -0,0 +1,4 @@
[[inputs.exec]]
# the command to run
command = "/usr/bin/myothercollector --foo=bar"
name_suffix = "_myothercollector"

View file

@ -0,0 +1,11 @@
[[inputs.memcached]]
servers = ["192.168.1.1"]
namepass = ["metricname1"]
namedrop = ["metricname2"]
pass = ["some", "strings"]
drop = ["other", "stuff"]
interval = "5s"
[inputs.memcached.tagpass]
goodtag = ["mytag"]
[inputs.memcached.tagdrop]
badtag = ["othertag"]

View file

@ -0,0 +1,2 @@
[[inputs.procstat]]
pid_file = "/var/run/grafana-server.pid"

297
config/testdata/telegraf-agent.toml vendored Normal file
View file

@ -0,0 +1,297 @@
# Telegraf configuration
# Telegraf is entirely plugin driven. All metrics are gathered from the
# declared inputs.
# Even if a plugin has no configuration, it must be declared in here
# to be active. Declaring a plugin means just specifying the name
# as a section with no variables. To deactivate a plugin, comment
# out the name and any variables.
# Use 'telegraf -config telegraf.toml -test' to see what metrics a config
# file would generate.
# One rule that plugins conform to is wherever a connection string
# can be passed, the values '' and 'localhost' are treated specially.
# They indicate to the plugin to use their own builtin configuration to
# connect to the local system.
# NOTE: The configuration has a few required parameters. They are marked
# with 'required'. Be sure to edit those to make this configuration work.
# Tags can also be specified via a normal map, but only one form at a time:
[global_tags]
dc = "us-east-1"
# Configuration for telegraf agent
[agent]
# Default data collection interval for all plugins
interval = "10s"
# run telegraf in debug mode
debug = false
# Override default hostname, if empty use os.Hostname()
hostname = ""
###############################################################################
# OUTPUTS #
###############################################################################
# Configuration for influxdb server to send metrics to
[[outputs.influxdb]]
# The full HTTP endpoint URL for your InfluxDB instance
# Multiple urls can be specified for InfluxDB cluster support. Server to
# write to will be randomly chosen each interval.
urls = ["http://localhost:8086"] # required.
# The target database for metrics. This database must already exist
database = "telegraf" # required.
[[outputs.influxdb]]
urls = ["udp://localhost:8089"]
database = "udp-telegraf"
# Configuration for the Kafka server to send metrics to
[[outputs.kafka]]
# URLs of kafka brokers
brokers = ["localhost:9092"]
# Kafka topic for producer messages
topic = "telegraf"
# Telegraf tag to use as a routing key
# ie, if this tag exists, its value will be used as the routing key
routing_tag = "host"
###############################################################################
# PLUGINS #
###############################################################################
# Read Apache status information (mod_status)
[[inputs.apache]]
# An array of Apache status URI to gather stats.
urls = ["http://localhost/server-status?auto"]
# Read metrics about cpu usage
[[inputs.cpu]]
# Whether to report per-cpu stats or not
percpu = true
# Whether to report total system cpu stats or not
totalcpu = true
# Comment this line if you want the raw CPU time metrics
fieldexclude = ["cpu_time"]
# Read metrics about disk usage by mount point
[[inputs.diskio]]
# no configuration
# Read metrics from one or many disque servers
[[inputs.disque]]
# An array of URI to gather stats about. Specify an ip or hostname
# with optional port and password. ie disque://localhost, disque://10.10.3.33:18832,
# 10.0.0.1:10000, etc.
#
# If no servers are specified, then localhost is used as the host.
servers = ["localhost"]
# Read stats from one or more Elasticsearch servers or clusters
[[inputs.elasticsearch]]
# specify a list of one or more Elasticsearch servers
servers = ["http://localhost:9200"]
# set local to false when you want to read the indices stats from all nodes
# within the cluster
local = true
# Read flattened metrics from one or more commands that output JSON to stdout
[[inputs.exec]]
# the command to run
command = "/usr/bin/mycollector --foo=bar"
name_suffix = "_mycollector"
# Read metrics of haproxy, via socket or csv stats page
[[inputs.haproxy]]
# An array of addresses to gather stats about. Specify an ip or hostname
# with optional port. ie localhost, 10.10.3.33:1936, etc.
#
# If no servers are specified, then default to 127.0.0.1:1936
servers = ["http://myhaproxy.com:1936", "http://anotherhaproxy.com:1936"]
# Or you can also use a local socket (not working yet)
# servers = ["socket:/run/haproxy/admin.sock"]
# Read flattened metrics from one or more JSON HTTP endpoints
[[inputs.http]]
# a name for the service being polled
name_override = "webserver_stats"
# URL of each server in the service's cluster
urls = [
"http://localhost:9999/stats/",
"http://localhost:9998/stats/",
]
# HTTP method to use (case-sensitive)
# method = "GET"
data_format = "json"
# Read metrics about disk IO by device
[[inputs.diskio]]
# no configuration
# read metrics from a Kafka 0.9+ topic
[[inputs.kafka_consumer]]
## kafka brokers
brokers = ["localhost:9092"]
## topic(s) to consume
topics = ["telegraf"]
## the name of the consumer group
consumer_group = "telegraf_metrics_consumers"
## Offset (must be either "oldest" or "newest")
offset = "oldest"
# Read metrics from a LeoFS Server via SNMP
[[inputs.leofs]]
# An array of URI to gather stats about LeoFS.
# Specify an ip or hostname with port. ie 127.0.0.1:4020
#
# If no servers are specified, then 127.0.0.1 is used as the host and 4020 as the port.
servers = ["127.0.0.1:4021"]
# Read metrics about memory usage
[[inputs.mem]]
# no configuration
# Read metrics from one or many memcached servers
[[inputs.memcached]]
# An array of addresses to gather stats about. Specify an ip or hostname
# with optional port. ie localhost, 10.0.0.1:11211, etc.
#
# If no servers are specified, then localhost is used as the host.
servers = ["localhost"]
# Telegraf plugin for gathering metrics from N Mesos masters
[[inputs.mesos]]
# Timeout, in ms.
timeout = 100
# A list of Mesos masters, default value is localhost:5050.
masters = ["localhost:5050"]
# Metrics groups to be collected, by default, all enabled.
master_collections = ["resources","master","system","slaves","frameworks","messages","evqueue","registrar"]
# Read metrics from one or many MongoDB servers
[[inputs.mongodb]]
# An array of URI to gather stats about. Specify an ip or hostname
# with optional port and password. ie mongodb://user:auth_key@10.10.3.30:27017,
# mongodb://10.10.3.33:18832, 10.0.0.1:10000, etc.
#
# If no servers are specified, then 127.0.0.1 is used as the host and 27017 as the port.
servers = ["127.0.0.1:27017"]
# Read metrics from one or many mysql servers
[[inputs.mysql]]
# specify servers via a url matching:
# [username[:password]@][protocol[(address)]]/[?tls=[true|false|skip-verify]]
# e.g.
# servers = ["root:root@http://10.0.0.18/?tls=false"]
# servers = ["root:passwd@tcp(127.0.0.1:3306)/"]
#
# If no servers are specified, then localhost is used as the host.
servers = ["localhost"]
# Read metrics about network interface usage
[[inputs.net]]
# By default, telegraf gathers stats from any up interface (excluding loopback)
# Setting interfaces will tell it to gather these explicit interfaces,
# regardless of status.
#
# interfaces = ["eth0", ... ]
# Read Nginx's basic status information (ngx_http_stub_status_module)
[[inputs.nginx]]
# An array of Nginx stub_status URI to gather stats.
urls = ["http://localhost/status"]
# Ping given url(s) and return statistics
[[inputs.ping]]
# urls to ping
urls = ["www.google.com"] # required
# number of pings to send (ping -c <COUNT>)
count = 1 # required
# interval, in s, at which to ping. 0 == default (ping -i <PING_INTERVAL>)
ping_interval = 0.0
# ping timeout, in s. 0 == no timeout (ping -t <TIMEOUT>)
timeout = 0.0
# interface to send ping from (ping -I <INTERFACE>)
interface = ""
# Read metrics from one or many postgresql servers
[[inputs.postgresql]]
# specify address via a url matching:
# postgres://[pqgotest[:password]]@localhost[/dbname]?sslmode=[disable|verify-ca|verify-full]
# or a simple string:
# host=localhost user=pqgotest password=... sslmode=... dbname=app_production
#
# All connection parameters are optional. By default, the host is localhost
# and the user is the currently running user. For localhost, we default
# to sslmode=disable as well.
#
# Without the dbname parameter, the driver will default to a database
# with the same name as the user. This dbname is just for instantiating a
# connection with the server and doesn't restrict the databases we are trying
# to grab metrics for.
#
address = "sslmode=disable"
# A list of databases to pull metrics about. If not specified, metrics for all
# databases are gathered.
# databases = ["app_production", "blah_testing"]
# [[postgresql.servers]]
# address = "influx@remoteserver"
# Read metrics from one or many prometheus clients
[[inputs.prometheus]]
# An array of urls to scrape metrics from.
urls = ["http://localhost:9100/metrics"]
# Read metrics from one or many RabbitMQ servers via the management API
[[inputs.rabbitmq]]
# Specify servers via an array of tables
# name = "rmq-server-1" # optional tag
# url = "http://localhost:15672"
# username = "guest"
# password = "guest"
# A list of nodes to pull metrics about. If not specified, metrics for
# all nodes are gathered.
# nodes = ["rabbit@node1", "rabbit@node2"]
# Read metrics from one or many redis servers
[[inputs.redis]]
# An array of URI to gather stats about. Specify an ip or hostname
# with optional port and password. ie redis://localhost, redis://10.10.3.33:18832,
# 10.0.0.1:10000, etc.
#
# If no servers are specified, then localhost is used as the host.
servers = ["localhost"]
# Read metrics from one or many RethinkDB servers
[[inputs.rethinkdb]]
# An array of URI to gather stats about. Specify an ip or hostname
# with optional port and password. ie rethinkdb://user:auth_key@10.10.3.30:28105,
# rethinkdb://10.10.3.33:18832, 10.0.0.1:10000, etc.
#
# If no servers are specified, then 127.0.0.1 is used as the host and 28015 as the port.
servers = ["127.0.0.1:28015"]
# Read metrics about swap memory usage
[[inputs.swap]]
# no configuration
# Read metrics about system load & uptime
[[inputs.system]]
# no configuration

5
config/testdata/wrong_cert_path.toml vendored Normal file
View file

@ -0,0 +1,5 @@
[[inputs.http_listener_v2]]
write_timeout = "1s"
max_body_size = "1MiB"
tls_cert = "invalid.pem"
tls_key = "invalid.key"

2
config/testdata/wrong_field_type.toml vendored Normal file
View file

@ -0,0 +1,2 @@
[[inputs.http_listener_v2]]
port = "80"

View file

@ -0,0 +1,2 @@
[[inputs.http_listener_v2]]
methods = "POST"

85
config/types.go Normal file
View file

@ -0,0 +1,85 @@
package config
import (
"fmt"
"regexp"
"strconv"
"strings"
"time"
"github.com/alecthomas/units"
)
// Regexp for day specifications in durations
var durationDayRe = regexp.MustCompile(`(\d+(?:\.\d+)?)d`)
// Duration is a time.Duration
type Duration time.Duration
// Size is an int64
type Size int64
// UnmarshalText parses the duration from the Text config file
func (d *Duration) UnmarshalText(b []byte) error {
// convert to string
durStr := string(b)
// Value is a TOML number (e.g. 3, 10, 3.5)
// First try parsing as integer seconds
sI, err := strconv.ParseInt(durStr, 10, 64)
if err == nil {
dur := time.Second * time.Duration(sI)
*d = Duration(dur)
return nil
}
// Second try parsing as float seconds
sF, err := strconv.ParseFloat(durStr, 64)
if err == nil {
dur := float64(time.Second) * sF
*d = Duration(dur)
return nil
}
// Finally, try value is a TOML string (e.g. "3s", 3s) or literal (e.g. '3s')
if durStr == "" {
*d = Duration(0)
return nil
}
// Handle "day" intervals and replace them with the "hours" equivalent
for _, m := range durationDayRe.FindAllStringSubmatch(durStr, -1) {
days, err := strconv.ParseFloat(m[1], 64)
if err != nil {
return fmt.Errorf("converting %q to hours failed: %w", durStr, err)
}
hours := strconv.FormatFloat(days*24, 'f', -1, 64) + "h"
durStr = strings.Replace(durStr, m[0], hours, 1)
}
dur, err := time.ParseDuration(durStr)
if err != nil {
return err
}
*d = Duration(dur)
return nil
}
// UnmarshalText parses a size from its textual TOML representation: either a
// plain integer byte count or a human-readable string such as "1GB"/"12GiB".
// Empty input leaves the value untouched.
func (s *Size) UnmarshalText(b []byte) error {
	if len(b) == 0 {
		return nil
	}
	text := string(b)

	// Plain integers are taken as a raw byte count.
	if n, err := strconv.ParseInt(text, 10, 64); err == nil {
		*s = Size(n)
		return nil
	}

	// Otherwise defer to the units package for SI/IEC suffixes.
	n, err := units.ParseStrictBytes(text)
	if err != nil {
		return err
	}
	*s = Size(n)
	return nil
}

277
config/types_test.go Normal file
View file

@ -0,0 +1,277 @@
package config_test
import (
"testing"
"time"
"github.com/stretchr/testify/require"
"github.com/influxdata/telegraf"
"github.com/influxdata/telegraf/config"
"github.com/influxdata/telegraf/plugins/inputs"
"github.com/influxdata/telegraf/plugins/processors/reverse_dns"
)
// TestConfigDuration verifies that Duration fields, plain integers and
// booleans are decoded correctly when loading a processor configuration.
func TestConfigDuration(t *testing.T) {
	cfg := config.NewConfig()
	data := []byte(`
[[processors.reverse_dns]]
 cache_ttl = "3h"
 lookup_timeout = "17s"
 max_parallel_lookups = 13
 ordered = true
 [[processors.reverse_dns.lookup]]
 field = "source_ip"
 dest = "source_name"
`)
	require.NoError(t, cfg.LoadConfigData(data, config.EmptySourcePath))
	require.Len(t, cfg.Processors, 1)

	proc := cfg.Processors[0].Processor.(*reverse_dns.ReverseDNS)
	require.EqualValues(t, 3*time.Hour, proc.CacheTTL)
	require.EqualValues(t, 17*time.Second, proc.LookupTimeout)
	require.Equal(t, 13, proc.MaxParallelLookups)
	require.True(t, proc.Ordered)
}
// TestDuration exercises Duration.UnmarshalText with valid inputs (strings
// with units, bare integers and floats, empty input) and invalid ones.
func TestDuration(t *testing.T) {
	valid := []struct {
		in       string
		expected time.Duration
	}{
		{`1s`, time.Second},
		{`10`, 10 * time.Second},
		{`1.5`, 1500 * time.Millisecond},
		{``, 0},
	}
	for _, tc := range valid {
		var d config.Duration
		require.NoError(t, d.UnmarshalText([]byte(tc.in)))
		require.Equal(t, tc.expected, time.Duration(d))
	}

	// Strings must carry a unit, and units must carry a number.
	var d config.Duration
	require.Error(t, d.UnmarshalText([]byte(`"1"`)))  // string missing unit
	require.Error(t, d.UnmarshalText([]byte(`'2'`)))  // string missing unit
	require.Error(t, d.UnmarshalText([]byte(`'ns'`))) // string missing time
	require.Error(t, d.UnmarshalText([]byte(`'us'`))) // string missing time
}
// TestSize exercises Size.UnmarshalText with raw integers and SI/IEC
// suffixed strings.
func TestSize(t *testing.T) {
	cases := []struct {
		in       string
		expected int64
	}{
		{`1B`, 1},
		{`1`, 1},
		{`1GB`, 1000 * 1000 * 1000},
		{`12GiB`, 12 * 1024 * 1024 * 1024},
	}
	for _, tc := range cases {
		var s config.Size
		require.NoError(t, s.UnmarshalText([]byte(tc.in)))
		require.Equal(t, tc.expected, int64(s))
	}
}
// TestTOMLParsingStringDurations checks that every accepted TOML string
// flavor (basic, literal and multi-line strings) and the extra "d" day unit
// decode to the expected time.Duration values.
func TestTOMLParsingStringDurations(t *testing.T) {
	data := []byte(`
[[inputs.typesmockup]]
 durations = [
 "1s",
 '''1s''',
 '1s',
 "1.5s",
 "",
 '',
 "2h",
 "42m",
 "100ms",
 "100us",
 "100ns",
 "1d",
 "7.5d",
 "7d8h15m",
 "3d7d",
 "15m8h3.5d"
 ]
`)
	expected := []time.Duration{
		1 * time.Second,
		1 * time.Second,
		1 * time.Second,
		1500 * time.Millisecond,
		0,
		0,
		2 * time.Hour,
		42 * time.Minute,
		100 * time.Millisecond,
		100 * time.Microsecond,
		100 * time.Nanosecond,
		24 * time.Hour,
		7*24*time.Hour + 12*time.Hour,
		7*24*time.Hour + 8*time.Hour + 15*time.Minute,
		10 * 24 * time.Hour,
		3*24*time.Hour + 12*time.Hour + 8*time.Hour + 15*time.Minute,
	}

	// Parse the configuration and pick out the mockup plugin instance.
	cfg := config.NewConfig()
	require.NoError(t, cfg.LoadConfigData(data, config.EmptySourcePath))
	require.Len(t, cfg.Inputs, 1)
	plugin := cfg.Inputs[0].Input.(*MockupTypesPlugin)

	require.Empty(t, plugin.Sizes)
	require.Len(t, plugin.Durations, len(expected))
	for i, got := range plugin.Durations {
		require.EqualValuesf(t, expected[i], got, "case %d failed", i)
	}
}
// TestTOMLParsingIntegerDurations checks that bare TOML integers are read as
// whole seconds.
func TestTOMLParsingIntegerDurations(t *testing.T) {
	data := []byte(`
[[inputs.typesmockup]]
 durations = [
 1,
 10,
 3601
 ]
`)
	expected := []time.Duration{
		1 * time.Second,
		10 * time.Second,
		3601 * time.Second,
	}

	// Parse the configuration and pick out the mockup plugin instance.
	cfg := config.NewConfig()
	require.NoError(t, cfg.LoadConfigData(data, config.EmptySourcePath))
	require.Len(t, cfg.Inputs, 1)
	plugin := cfg.Inputs[0].Input.(*MockupTypesPlugin)

	require.Empty(t, plugin.Sizes)
	require.Len(t, plugin.Durations, len(expected))
	for i, got := range plugin.Durations {
		require.EqualValuesf(t, expected[i], got, "case %d failed", i)
	}
}
// TestTOMLParsingFloatDurations checks that bare TOML floats are read as
// (possibly fractional) seconds.
func TestTOMLParsingFloatDurations(t *testing.T) {
	data := []byte(`
[[inputs.typesmockup]]
 durations = [
 42.0,
 1.5
 ]
`)
	expected := []time.Duration{
		42 * time.Second,
		1500 * time.Millisecond,
	}

	// Parse the configuration and pick out the mockup plugin instance.
	cfg := config.NewConfig()
	require.NoError(t, cfg.LoadConfigData(data, config.EmptySourcePath))
	require.Len(t, cfg.Inputs, 1)
	plugin := cfg.Inputs[0].Input.(*MockupTypesPlugin)

	require.Empty(t, plugin.Sizes)
	require.Len(t, plugin.Durations, len(expected))
	for i, got := range plugin.Durations {
		require.EqualValuesf(t, expected[i], got, "case %d failed", i)
	}
}
// TestTOMLParsingStringSizes checks that all accepted TOML string flavors
// with SI ("kB", "GB") and IEC ("KiB", "GiB") suffixes decode to the
// expected byte counts.
func TestTOMLParsingStringSizes(t *testing.T) {
	data := []byte(`
[[inputs.typesmockup]]
 sizes = [
 "1B",
 "1",
 '1',
 '''15kB''',
 """15KiB""",
 "1GB",
 "12GiB"
 ]
`)
	expected := []int64{
		1,
		1,
		1,
		15 * 1000,
		15 * 1024,
		1000 * 1000 * 1000,
		12 * 1024 * 1024 * 1024,
	}

	// Parse the configuration and pick out the mockup plugin instance.
	cfg := config.NewConfig()
	require.NoError(t, cfg.LoadConfigData(data, config.EmptySourcePath))
	require.Len(t, cfg.Inputs, 1)
	plugin := cfg.Inputs[0].Input.(*MockupTypesPlugin)

	require.Empty(t, plugin.Durations)
	require.Len(t, plugin.Sizes, len(expected))
	for i, got := range plugin.Sizes {
		require.EqualValuesf(t, expected[i], got, "case %d failed", i)
	}
}
// TestTOMLParsingIntegerSizes checks that bare TOML integers are read as raw
// byte counts.
func TestTOMLParsingIntegerSizes(t *testing.T) {
	data := []byte(`
[[inputs.typesmockup]]
 sizes = [
 0,
 1,
 1000,
 1024
 ]
`)
	expected := []int64{
		0,
		1,
		1000,
		1024,
	}

	// Parse the configuration and pick out the mockup plugin instance.
	cfg := config.NewConfig()
	require.NoError(t, cfg.LoadConfigData(data, config.EmptySourcePath))
	require.Len(t, cfg.Inputs, 1)
	plugin := cfg.Inputs[0].Input.(*MockupTypesPlugin)

	require.Empty(t, plugin.Durations)
	require.Len(t, plugin.Sizes, len(expected))
	for i, got := range plugin.Sizes {
		require.EqualValuesf(t, expected[i], got, "case %d failed", i)
	}
}
// MockupTypesPlugin is a minimal input plugin used by the tests in this file.
// It is defined locally (rather than using a real plugin) to avoid a cyclic
// dependency between the config package and the plugin packages.
type MockupTypesPlugin struct {
	Durations []config.Duration `toml:"durations"` // exercises Duration unmarshalling
	Sizes     []config.Size     `toml:"sizes"`     // exercises Size unmarshalling
}

// SampleConfig implements telegraf.Input; the returned text is irrelevant here.
func (*MockupTypesPlugin) SampleConfig() string { return "Mockup test types plugin" }

// Gather implements telegraf.Input as a no-op; these tests only parse config.
func (*MockupTypesPlugin) Gather(_ telegraf.Accumulator) error { return nil }
// Register the mockup plugin on loading
func init() {
// Register the mockup input plugin for the required names
inputs.Add("typesmockup", func() telegraf.Input { return &MockupTypesPlugin{} })
}