
Adding upstream version 1.34.4.

Signed-off-by: Daniel Baumann <daniel@debian.org>
Daniel Baumann 2025-05-24 07:26:29 +02:00
parent e393c3af3f
commit 4978089aab
Signed by: daniel
GPG key ID: FBB4F0E80A80222F
4963 changed files with 677545 additions and 0 deletions

@ -0,0 +1,69 @@
# File Input Plugin
This plugin reads the __complete__ contents of the configured files
__every__ interval. The file content is split line-wise and parsed according to
one of the supported [data formats][data_formats].
> [!TIP]
> If you wish to process only newly appended lines, use the [tail][tail] input
> plugin instead.
⭐ Telegraf v1.8.0
🏷️ system
💻 all
[data_formats]: /docs/DATA_FORMATS_INPUT.md
[tail]: /plugins/inputs/tail
## Global configuration options <!-- @/docs/includes/plugin_config.md -->
In addition to the plugin-specific configuration settings, plugins support
additional global and plugin configuration settings. These settings are used to
modify metrics, tags, and fields, or to create aliases and configure ordering,
etc. See [CONFIGURATION.md][CONFIGURATION.md] for more details.
[CONFIGURATION.md]: ../../../docs/CONFIGURATION.md#plugins
## Configuration
```toml @sample.conf
# Parse a complete file each interval
[[inputs.file]]
## Files to parse each interval. Accepts standard unix glob matching rules,
## as well as ** to match recursive files and directories.
files = ["/tmp/metrics.out"]
## Character encoding to use when interpreting the file contents. Invalid
## characters are replaced using the unicode replacement character. When set
## to the empty string the data is not decoded to text.
## ex: character_encoding = "utf-8"
## character_encoding = "utf-16le"
## character_encoding = "utf-16be"
## character_encoding = ""
# character_encoding = ""
## Data format to consume.
## Each data format has its own unique set of configuration options; read
## more about them here:
## https://github.com/influxdata/telegraf/blob/master/docs/DATA_FORMATS_INPUT.md
data_format = "influx"
## Please use caution when using the following options: when file name
## variation is high, this can increase the cardinality significantly. Read
## more about cardinality here:
## https://docs.influxdata.com/influxdb/cloud/reference/glossary/#series-cardinality
## Name of tag to store the name of the file. Disabled if not set.
# file_tag = ""
## Name of tag to store the absolute path and name of the file. Disabled if
## not set.
# file_path_tag = ""
```
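The `files` option is expanded with Telegraf's glob helper on every interval, so a pattern containing `**` also descends into sub-directories. As a minimal sketch of how such a pattern resolves (it reuses the same `globpath` package the plugin calls internally, so it only builds inside the Telegraf source tree, and the path is purely hypothetical):
```go
package main

import (
	"fmt"
	"log"

	"github.com/influxdata/telegraf/internal/globpath"
)

func main() {
	// "**" makes the pattern match files in nested directories as well.
	g, err := globpath.Compile("/var/log/**.log") // hypothetical pattern
	if err != nil {
		log.Fatalf("could not compile glob: %v", err)
	}
	for _, name := range g.Match() {
		fmt.Println(name) // every file the plugin would read this interval
	}
}
```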
## Metrics
The format of metrics produced by this plugin depends on the content and data
format of the file.
## Example Output
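As an illustration only: with `data_format = "influx"` (as in the sample configuration), a file containing the made-up line-protocol record below is emitted as a metric unchanged.
```text
example_measurement,source=demo value=42i 1689984000000000000
```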

@ -0,0 +1,13 @@
version: '3'
services:
telegraf:
image: glinton/scratch
volumes:
- ./telegraf.conf:/telegraf.conf
- ../../../../telegraf:/telegraf
- ./dev/json_a.log:/var/log/test.log
entrypoint:
- /telegraf
- --config
- /telegraf.conf

@ -0,0 +1,7 @@
[[inputs.file]]
files = ["/var/log/test.log"]
data_format = "json"
name_override = "json_file"
[[outputs.file]]
files = ["stdout"]

@ -0,0 +1,2 @@
127.0.0.1 user-identifier frank [10/Oct/2000:13:55:36 -0700] "GET /apache_pb.gif HTTP/1.0" 200 2326
128.0.0.1 user-identifier tony [10/Oct/2000:13:55:36 -0800] "GET /apache_pb.gif HTTP/1.0" 300 45

@ -0,0 +1,14 @@
{
"parent": {
"child": 3.0,
"ignored_child": "hi"
},
"ignored_null": null,
"integer": 4,
"list": [3, 4],
"ignored_parent": {
"another_ignored_null": null,
"ignored_string": "hello, world!"
},
"another_list": [4]
}

129
plugins/inputs/file/file.go Normal file
@ -0,0 +1,129 @@
//go:generate ../../../tools/readme_config_includer/generator
package file
import (
_ "embed"
"fmt"
"io"
"os"
"path/filepath"
"sync"
"github.com/dimchansky/utfbom"
"github.com/influxdata/telegraf"
"github.com/influxdata/telegraf/internal"
"github.com/influxdata/telegraf/internal/globpath"
"github.com/influxdata/telegraf/plugins/common/encoding"
"github.com/influxdata/telegraf/plugins/inputs"
)
//go:embed sample.conf
var sampleConfig string
var once sync.Once
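// File reads and parses the complete contents of all configured files on
// every gather cycle.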
type File struct {
Files []string `toml:"files"`
FileTag string `toml:"file_tag"`
FilePathTag string `toml:"file_path_tag"`
CharacterEncoding string `toml:"character_encoding"`
Log telegraf.Logger `toml:"-"`
parserFunc telegraf.ParserFunc
filenames []string
decoder *encoding.Decoder
}
func (*File) SampleConfig() string {
return sampleConfig
}
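// Init prepares the character-set decoder for the configured
// character_encoding; an empty setting leaves the file contents undecoded.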
func (f *File) Init() error {
var err error
f.decoder, err = encoding.NewDecoder(f.CharacterEncoding)
return err
}
func (f *File) SetParserFunc(fn telegraf.ParserFunc) {
f.parserFunc = fn
}
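// Gather reads every file matching the configured globs, parses it with a
// freshly created parser and adds the resulting metrics, plus the optional
// file name and path tags, to the accumulator.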
func (f *File) Gather(acc telegraf.Accumulator) error {
err := f.refreshFilePaths()
if err != nil {
return err
}
for _, k := range f.filenames {
metrics, err := f.readMetric(k)
if err != nil {
return err
}
for _, m := range metrics {
if f.FileTag != "" {
m.AddTag(f.FileTag, filepath.Base(k))
}
if f.FilePathTag != "" {
if absPath, err := filepath.Abs(k); err == nil {
m.AddTag(f.FilePathTag, absPath)
}
}
acc.AddMetric(m)
}
}
return nil
}
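// refreshFilePaths re-expands the configured glob patterns and stores the
// matching file names; a pattern matching no files at all is an error.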
func (f *File) refreshFilePaths() error {
var allFiles []string
for _, file := range f.Files {
g, err := globpath.Compile(file)
if err != nil {
return fmt.Errorf("could not compile glob %q: %w", file, err)
}
files := g.Match()
if len(files) == 0 {
return fmt.Errorf("could not find file(s): %v", file)
}
allFiles = append(allFiles, files...)
}
f.filenames = allFiles
return nil
}
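// readMetric decodes a single file (skipping any byte-order mark), parses it
// into metrics and logs a debug message once if no metrics were created.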
func (f *File) readMetric(filename string) ([]telegraf.Metric, error) {
file, err := os.Open(filename)
if err != nil {
return nil, err
}
defer file.Close()
r, _ := utfbom.Skip(f.decoder.Reader(file))
fileContents, err := io.ReadAll(r)
if err != nil {
return nil, fmt.Errorf("could not read %q: %w", filename, err)
}
parser, err := f.parserFunc()
if err != nil {
return nil, fmt.Errorf("could not instantiate parser: %w", err)
}
metrics, err := parser.Parse(fileContents)
if err != nil {
return metrics, fmt.Errorf("could not parse %q: %w", filename, err)
}
if len(metrics) == 0 {
once.Do(func() {
f.Log.Debug(internal.NoMetricsCreatedMsg)
})
}
return metrics, err
}
func init() {
inputs.Add("file", func() telegraf.Input {
return &File{}
})
}

@ -0,0 +1,466 @@
//go:build !windows
// TODO: Windows - should be enabled for Windows when super asterisk is fixed on Windows
// https://github.com/influxdata/telegraf/issues/6248
package file
import (
"os"
"path/filepath"
"testing"
"time"
"github.com/google/go-cmp/cmp"
"github.com/stretchr/testify/require"
"github.com/influxdata/telegraf"
"github.com/influxdata/telegraf/metric"
"github.com/influxdata/telegraf/plugins/parsers/csv"
"github.com/influxdata/telegraf/plugins/parsers/grok"
"github.com/influxdata/telegraf/plugins/parsers/json"
"github.com/influxdata/telegraf/testutil"
)
func TestRefreshFilePaths(t *testing.T) {
wd, err := os.Getwd()
require.NoError(t, err)
r := File{
Files: []string{filepath.Join(wd, "dev", "testfiles", "**.log")},
Log: testutil.Logger{},
}
err = r.Init()
require.NoError(t, err)
err = r.refreshFilePaths()
require.NoError(t, err)
require.Len(t, r.filenames, 2)
}
func TestFileTag(t *testing.T) {
acc := testutil.Accumulator{}
wd, err := os.Getwd()
require.NoError(t, err)
r := File{
Files: []string{filepath.Join(wd, "dev", "testfiles", "json_a.log")},
FileTag: "filename",
FilePathTag: "filepath",
Log: testutil.Logger{},
}
require.NoError(t, r.Init())
r.SetParserFunc(func() (telegraf.Parser, error) {
p := &json.Parser{}
err := p.Init()
return p, err
})
require.NoError(t, r.Gather(&acc))
for _, m := range acc.Metrics {
require.Contains(t, m.Tags, "filename")
require.Equal(t, filepath.Base(r.Files[0]), m.Tags["filename"])
require.Contains(t, m.Tags, "filepath")
require.True(t, filepath.IsAbs(m.Tags["filepath"]))
}
}
func TestJSONParserCompile(t *testing.T) {
var acc testutil.Accumulator
wd, err := os.Getwd()
require.NoError(t, err)
r := File{
Files: []string{filepath.Join(wd, "dev", "testfiles", "json_a.log")},
Log: testutil.Logger{},
}
require.NoError(t, r.Init())
r.SetParserFunc(func() (telegraf.Parser, error) {
p := &json.Parser{TagKeys: []string{"parent_ignored_child"}}
err := p.Init()
return p, err
})
require.NoError(t, r.Gather(&acc))
require.Equal(t, map[string]string{"parent_ignored_child": "hi"}, acc.Metrics[0].Tags)
require.Len(t, acc.Metrics[0].Fields, 5)
}
func TestGrokParser(t *testing.T) {
wd, err := os.Getwd()
require.NoError(t, err)
var acc testutil.Accumulator
r := File{
Files: []string{filepath.Join(wd, "dev", "testfiles", "grok_a.log")},
Log: testutil.Logger{},
}
err = r.Init()
require.NoError(t, err)
r.SetParserFunc(func() (telegraf.Parser, error) {
parser := &grok.Parser{
Patterns: []string{"%{COMMON_LOG_FORMAT}"},
Log: testutil.Logger{},
}
err := parser.Init()
return parser, err
})
err = r.Gather(&acc)
require.NoError(t, err)
require.Len(t, acc.Metrics, 2)
}
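// TestCharacterEncoding parses the same mtr CSV sample stored in different
// encodings and expects identical metrics for every character_encoding setting.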
func TestCharacterEncoding(t *testing.T) {
expected := []telegraf.Metric{
testutil.MustMetric("file",
map[string]string{
"dest": "example.org",
"hop": "1",
"ip": "12.122.114.5",
},
map[string]interface{}{
"avg": 21.55,
"best": 19.34,
"loss": 0.0,
"snt": 10,
"status": "OK",
"stdev": 2.05,
"worst": 26.83,
},
time.Unix(0, 0),
),
testutil.MustMetric("file",
map[string]string{
"dest": "example.org",
"hop": "2",
"ip": "192.205.32.238",
},
map[string]interface{}{
"avg": 25.11,
"best": 20.8,
"loss": 0.0,
"snt": 10,
"status": "OK",
"stdev": 6.03,
"worst": 38.85,
},
time.Unix(0, 0),
),
testutil.MustMetric("file",
map[string]string{
"dest": "example.org",
"hop": "3",
"ip": "152.195.85.133",
},
map[string]interface{}{
"avg": 20.18,
"best": 19.75,
"loss": 0.0,
"snt": 10,
"status": "OK",
"stdev": 0.0,
"worst": 20.78,
},
time.Unix(0, 0),
),
testutil.MustMetric("file",
map[string]string{
"dest": "example.org",
"hop": "4",
"ip": "93.184.216.34",
},
map[string]interface{}{
"avg": 24.02,
"best": 19.75,
"loss": 0.0,
"snt": 10,
"status": "OK",
"stdev": 4.67,
"worst": 32.41,
},
time.Unix(0, 0),
),
}
tests := []struct {
name string
plugin *File
csv csv.Parser
file string
}{
{
name: "empty character_encoding with utf-8",
plugin: &File{
Files: []string{"testdata/mtr-utf-8.csv"},
CharacterEncoding: "",
Log: testutil.Logger{},
},
csv: csv.Parser{
MetricName: "file",
SkipRows: 1,
ColumnNames: []string{"", "", "status", "dest", "hop", "ip", "loss", "snt", "", "", "avg", "best", "worst", "stdev"},
TagColumns: []string{"dest", "hop", "ip"},
},
},
{
name: "utf-8 character_encoding with utf-8",
plugin: &File{
Files: []string{"testdata/mtr-utf-8.csv"},
CharacterEncoding: "utf-8",
Log: testutil.Logger{},
},
csv: csv.Parser{
MetricName: "file",
SkipRows: 1,
ColumnNames: []string{"", "", "status", "dest", "hop", "ip", "loss", "snt", "", "", "avg", "best", "worst", "stdev"},
TagColumns: []string{"dest", "hop", "ip"},
},
},
{
name: "utf-16le character_encoding with utf-16le",
plugin: &File{
Files: []string{"testdata/mtr-utf-16le.csv"},
CharacterEncoding: "utf-16le",
Log: testutil.Logger{},
},
csv: csv.Parser{
MetricName: "file",
SkipRows: 1,
ColumnNames: []string{"", "", "status", "dest", "hop", "ip", "loss", "snt", "", "", "avg", "best", "worst", "stdev"},
TagColumns: []string{"dest", "hop", "ip"},
},
},
{
name: "utf-16be character_encoding with utf-16be",
plugin: &File{
Files: []string{"testdata/mtr-utf-16be.csv"},
CharacterEncoding: "utf-16be",
Log: testutil.Logger{},
},
csv: csv.Parser{
MetricName: "file",
SkipRows: 1,
ColumnNames: []string{"", "", "status", "dest", "hop", "ip", "loss", "snt", "", "", "avg", "best", "worst", "stdev"},
TagColumns: []string{"dest", "hop", "ip"},
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
err := tt.plugin.Init()
require.NoError(t, err)
tt.plugin.SetParserFunc(func() (telegraf.Parser, error) {
parser := tt.csv
err := parser.Init()
return &parser, err
})
var acc testutil.Accumulator
err = tt.plugin.Gather(&acc)
require.NoError(t, err)
testutil.RequireMetricsEqual(t, expected, acc.GetTelegrafMetrics(), testutil.IgnoreTime())
})
}
}
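// TestStatefulParsers gathers the same CSV file repeatedly to verify that a
// fresh parser is used per read and the results do not change between gathers.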
func TestStatefulParsers(t *testing.T) {
expected := []telegraf.Metric{
testutil.MustMetric("file",
map[string]string{
"dest": "example.org",
"hop": "1",
"ip": "12.122.114.5",
},
map[string]interface{}{
"avg": 21.55,
"best": 19.34,
"loss": 0.0,
"snt": 10,
"status": "OK",
"stdev": 2.05,
"worst": 26.83,
},
time.Unix(0, 0),
),
testutil.MustMetric("file",
map[string]string{
"dest": "example.org",
"hop": "2",
"ip": "192.205.32.238",
},
map[string]interface{}{
"avg": 25.11,
"best": 20.8,
"loss": 0.0,
"snt": 10,
"status": "OK",
"stdev": 6.03,
"worst": 38.85,
},
time.Unix(0, 0),
),
testutil.MustMetric("file",
map[string]string{
"dest": "example.org",
"hop": "3",
"ip": "152.195.85.133",
},
map[string]interface{}{
"avg": 20.18,
"best": 19.75,
"loss": 0.0,
"snt": 10,
"status": "OK",
"stdev": 0.0,
"worst": 20.78,
},
time.Unix(0, 0),
),
testutil.MustMetric("file",
map[string]string{
"dest": "example.org",
"hop": "4",
"ip": "93.184.216.34",
},
map[string]interface{}{
"avg": 24.02,
"best": 19.75,
"loss": 0.0,
"snt": 10,
"status": "OK",
"stdev": 4.67,
"worst": 32.41,
},
time.Unix(0, 0),
),
}
tests := []struct {
name string
plugin *File
csv csv.Parser
file string
count int
}{
{
name: "read file twice",
plugin: &File{
Files: []string{"testdata/mtr-utf-8.csv"},
CharacterEncoding: "",
Log: testutil.Logger{},
},
csv: csv.Parser{
MetricName: "file",
SkipRows: 1,
ColumnNames: []string{"", "", "status", "dest", "hop", "ip", "loss", "snt", "", "", "avg", "best", "worst", "stdev"},
TagColumns: []string{"dest", "hop", "ip"},
},
count: 2,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
err := tt.plugin.Init()
require.NoError(t, err)
tt.plugin.SetParserFunc(func() (telegraf.Parser, error) {
parser := tt.csv
err := parser.Init()
return &parser, err
})
var acc testutil.Accumulator
for i := 0; i < tt.count; i++ {
require.NoError(t, tt.plugin.Gather(&acc))
testutil.RequireMetricsEqual(t, expected, acc.GetTelegrafMetrics(), testutil.IgnoreTime())
acc.ClearMetrics()
}
})
}
}
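// TestCSVBehavior checks that two consecutive gathers of the same CSV file
// each yield the full set of rows, i.e. the header row is consumed again on
// every read.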
func TestCSVBehavior(t *testing.T) {
// Setup the CSV parser creator function
parserFunc := func() (telegraf.Parser, error) {
parser := &csv.Parser{
MetricName: "file",
HeaderRowCount: 1,
}
err := parser.Init()
return parser, err
}
// Setup the plugin
plugin := &File{
Files: []string{filepath.Join("testdata", "csv_behavior_input.csv")},
Log: testutil.Logger{},
}
plugin.SetParserFunc(parserFunc)
require.NoError(t, plugin.Init())
expected := []telegraf.Metric{
metric.New(
"file",
map[string]string{},
map[string]interface{}{
"a": int64(1),
"b": int64(2),
},
time.Unix(0, 1),
),
metric.New(
"file",
map[string]string{},
map[string]interface{}{
"a": int64(3),
"b": int64(4),
},
time.Unix(0, 2),
),
metric.New(
"file",
map[string]string{},
map[string]interface{}{
"a": int64(1),
"b": int64(2),
},
time.Unix(0, 3),
),
metric.New(
"file",
map[string]string{},
map[string]interface{}{
"a": int64(3),
"b": int64(4),
},
time.Unix(0, 4),
),
}
var acc testutil.Accumulator
// Run gather once
require.NoError(t, plugin.Gather(&acc))
// Run gather a second time
require.NoError(t, plugin.Gather(&acc))
require.Eventuallyf(t, func() bool {
acc.Lock()
defer acc.Unlock()
return acc.NMetrics() >= uint64(len(expected))
}, time.Second, 100*time.Millisecond, "Expected %d metrics found %d", len(expected), acc.NMetrics())
// Check the result
options := []cmp.Option{
testutil.SortMetrics(),
testutil.IgnoreTime(),
}
actual := acc.GetTelegrafMetrics()
testutil.RequireMetricsEqual(t, expected, actual, options...)
}

@ -0,0 +1,32 @@
# Parse a complete file each interval
[[inputs.file]]
## Files to parse each interval. Accepts standard unix glob matching rules,
## as well as ** to match recursive files and directories.
files = ["/tmp/metrics.out"]
## Character encoding to use when interpreting the file contents. Invalid
## characters are replaced using the unicode replacement character. When set
## to the empty string the data is not decoded to text.
## ex: character_encoding = "utf-8"
## character_encoding = "utf-16le"
## character_encoding = "utf-16be"
## character_encoding = ""
# character_encoding = ""
## Data format to consume.
## Each data format has its own unique set of configuration options; read
## more about them here:
## https://github.com/influxdata/telegraf/blob/master/docs/DATA_FORMATS_INPUT.md
data_format = "influx"
## Please use caution when using the following options: when file name
## variation is high, this can increase the cardinality significantly. Read
## more about cardinality here:
## https://docs.influxdata.com/influxdb/cloud/reference/glossary/#series-cardinality
## Name of tag to store the name of the file. Disabled if not set.
# file_tag = ""
## Name of tag to store the absolute path and name of the file. Disabled if
## not set.
# file_path_tag = ""

@ -0,0 +1,3 @@
a,b
1,2
3,4

Binary file not shown.
Mtr_Version,Start_Time,Status,Host,Hop,Ip,Loss%,Snt, ,Last,Avg,Best,Wrst,StDev,
MTR.0.87,1593667013,OK,example.org,1,12.122.114.5,0.00,10,0,21.86,21.55,19.34,26.83,2.05
MTR.0.87,1593667013,OK,example.org,2,192.205.32.238,0.00,10,0,32.83,25.11,20.80,38.85,6.03
MTR.0.87,1593667013,OK,example.org,3,152.195.85.133,0.00,10,0,19.75,20.18,19.75,20.78,0.00
MTR.0.87,1593667013,OK,example.org,4,93.184.216.34,0.00,10,0,19.75,24.02,19.75,32.41,4.67

Binary file not shown.
Mtr_Version,Start_Time,Status,Host,Hop,Ip,Loss%,Snt, ,Last,Avg,Best,Wrst,StDev,
MTR.0.87,1593667013,OK,example.org,1,12.122.114.5,0.00,10,0,21.86,21.55,19.34,26.83,2.05
MTR.0.87,1593667013,OK,example.org,2,192.205.32.238,0.00,10,0,32.83,25.11,20.80,38.85,6.03
MTR.0.87,1593667013,OK,example.org,3,152.195.85.133,0.00,10,0,19.75,20.18,19.75,20.78,0.00
MTR.0.87,1593667013,OK,example.org,4,93.184.216.34,0.00,10,0,19.75,24.02,19.75,32.41,4.67

@ -0,0 +1,5 @@
Mtr_Version,Start_Time,Status,Host,Hop,Ip,Loss%,Snt, ,Last,Avg,Best,Wrst,StDev,
MTR.0.87,1593667013,OK,example.org,1,12.122.114.5,0.00,10,0,21.86,21.55,19.34,26.83,2.05
MTR.0.87,1593667013,OK,example.org,2,192.205.32.238,0.00,10,0,32.83,25.11,20.80,38.85,6.03
MTR.0.87,1593667013,OK,example.org,3,152.195.85.133,0.00,10,0,19.75,20.18,19.75,20.78,0.00
MTR.0.87,1593667013,OK,example.org,4,93.184.216.34,0.00,10,0,19.75,24.02,19.75,32.41,4.67