1
0
Fork 0

Adding upstream version 1.34.4.

Signed-off-by: Daniel Baumann <daniel@debian.org>
This commit is contained in:
Daniel Baumann 2025-05-24 07:26:29 +02:00
parent e393c3af3f
commit 4978089aab
Signed by: daniel
GPG key ID: FBB4F0E80A80222F
4963 changed files with 677545 additions and 0 deletions

View file

@ -0,0 +1,70 @@
# Datadog Output Plugin
This plugin writes metrics to the [Datadog Metrics API][metrics] and requires an
`apikey` which can be obtained [here][apikey] for the account.
> [!NOTE]
> This plugin supports the v1 API.
⭐ Telegraf v0.1.6
🏷️ applications, cloud, datastore
💻 all
## Global configuration options <!-- @/docs/includes/plugin_config.md -->
In addition to the plugin-specific configuration settings, plugins support
additional global and plugin configuration settings. These settings are used to
modify metrics, tags, and fields or create aliases and configure ordering, etc.
See the [CONFIGURATION.md][CONFIGURATION.md] for more details.
[CONFIGURATION.md]: ../../../docs/CONFIGURATION.md#plugins
## Configuration
```toml @sample.conf
# Configuration for DataDog API to send metrics to.
[[outputs.datadog]]
## Datadog API key
apikey = "my-secret-key"
## Connection timeout.
# timeout = "5s"
## Write URL override; useful for debugging.
## This plugin only supports the v1 API currently due to the authentication
## method used.
# url = "https://app.datadoghq.com/api/v1/series"
## Set http_proxy
# use_system_proxy = false
# http_proxy_url = "http://localhost:8888"
## Override the default (none) compression used to send data.
## Supports: "zlib", "none"
# compression = "none"
## When non-zero, converts count metrics submitted by inputs.statsd
## into rate, while dividing the metric value by this number.
## Note that in order for metrics to be submitted simultaneously alongside
## a Datadog agent, rate_interval has to match the interval used by the
## agent - which defaults to 10s
# rate_interval = 0s
```
## Metrics
Datadog metric names are formed by joining the Telegraf metric name and the
field key with a `.` character.
Field values are converted to floating point numbers. Strings and floats that
cannot be sent over JSON, namely NaN and Inf, are ignored.
Setting `rate_interval` to non-zero will convert `count` metrics to `rate`
and divide its value by this interval before submitting to Datadog.
This allows Telegraf to submit metrics alongside Datadog agents when their rate
intervals are the same (Datadog defaults to `10s`).
Note that this only supports metrics ingested via `inputs.statsd` given
the dependency on the `metric_type` tag it creates. There is only support for
`counter` metrics, and `count` values from `timing` and `histogram` metrics.
[metrics]: https://docs.datadoghq.com/api/v1/metrics/#submit-metrics
[apikey]: https://app.datadoghq.com/account/settings#api

View file

@ -0,0 +1,284 @@
//go:generate ../../../tools/readme_config_includer/generator
package datadog
import (
"bytes"
_ "embed"
"encoding/json"
"errors"
"fmt"
"io"
"math"
"net/http"
"net/url"
"strings"
"time"
"github.com/influxdata/telegraf"
"github.com/influxdata/telegraf/config"
"github.com/influxdata/telegraf/internal"
"github.com/influxdata/telegraf/plugins/common/proxy"
"github.com/influxdata/telegraf/plugins/outputs"
)
//go:embed sample.conf
var sampleConfig string

// Datadog is the output plugin that delivers metrics to the Datadog v1
// series API.
type Datadog struct {
	Apikey       string          `toml:"apikey"`
	Timeout      config.Duration `toml:"timeout"`
	URL          string          `toml:"url"`
	Compression  string          `toml:"compression"`
	RateInterval config.Duration `toml:"rate_interval"`

	Log telegraf.Logger `toml:"-"`

	// client is built in Connect and reused for every Write.
	client *http.Client
	proxy.HTTPProxy
}

// TimeSeries is the top-level JSON payload posted to the v1 series endpoint.
type TimeSeries struct {
	Series []*Metric `json:"series"`
}

// Metric is a single series entry in the Datadog payload.
type Metric struct {
	Metric   string   `json:"metric"`
	Points   [1]Point `json:"points"`
	Host     string   `json:"host"`
	Type     string   `json:"type,omitempty"`
	Tags     []string `json:"tags,omitempty"`
	Interval int64    `json:"interval"`
}

// Point is a (unix-timestamp, value) pair; both halves are float64 because
// that is what the Datadog JSON API expects.
type Point [2]float64

// datadogAPI is the default public v1 series endpoint.
const datadogAPI = "https://app.datadoghq.com/api/v1/series"
// SampleConfig returns the embedded sample configuration for the plugin.
func (*Datadog) SampleConfig() string {
	return sampleConfig
}
// Connect validates the configuration and prepares the HTTP client used to
// deliver metrics, honoring the configured proxy and timeout.
func (d *Datadog) Connect() error {
	if d.Apikey == "" {
		return errors.New("apikey is a required field for datadog output")
	}

	proxyFn, err := d.Proxy()
	if err != nil {
		return err
	}

	transport := &http.Transport{Proxy: proxyFn}
	d.client = &http.Client{
		Transport: transport,
		Timeout:   time.Duration(d.Timeout),
	}
	return nil
}
// convertToDatadogMetric flattens each Telegraf metric into one Datadog
// series entry per field, deriving the Datadog metric type from the Telegraf
// value type and, when rate_interval is set, converting statsd-sourced
// counts into rates. Metrics that cannot be built are logged and skipped.
func (d *Datadog) convertToDatadogMetric(metrics []telegraf.Metric) []*Metric {
	tempSeries := make([]*Metric, 0, len(metrics))
	for _, m := range metrics {
		if dogMs, err := buildMetrics(m); err == nil {
			metricTags := buildTags(m.TagList())
			host, _ := m.GetTag("host")
			// Retrieve the metric_type tag created by inputs.statsd
			statsDMetricType, _ := m.GetTag("metric_type")

			if len(dogMs) == 0 {
				// All fields were unsendable (strings, NaN, Inf).
				continue
			}
			for fieldName, dogM := range dogMs {
				// name of the datadog measurement
				var dname string
				if fieldName == "value" {
					// adding .value seems redundant here
					dname = m.Name()
				} else {
					dname = m.Name() + "." + fieldName
				}
				var tname string
				var interval int64
				// Datadog defaults the interval to 1 second.
				interval = 1
				switch m.Type() {
				case telegraf.Counter, telegraf.Untyped:
					if d.RateInterval > 0 && isRateable(statsDMetricType, fieldName) {
						// interval is expected to be in seconds
						rateIntervalSeconds := time.Duration(d.RateInterval).Seconds()
						interval = int64(rateIntervalSeconds)
						// Divide the value in place to turn the count into a rate.
						dogM[1] = dogM[1] / rateIntervalSeconds
						tname = "rate"
					} else if m.Type() == telegraf.Counter {
						tname = "count"
					} else {
						tname = ""
					}
				case telegraf.Gauge:
					tname = "gauge"
				default:
					tname = ""
				}
				metric := &Metric{
					Metric:   dname,
					Tags:     metricTags,
					Host:     host,
					Type:     tname,
					Interval: interval,
				}
				metric.Points[0] = dogM
				tempSeries = append(tempSeries, metric)
			}
		} else {
			d.Log.Infof("Unable to build Metric for %s due to error '%v', skipping", m.Name(), err)
		}
	}
	return tempSeries
}
// Write serializes the metrics into the Datadog v1 series payload and POSTs
// it to the configured endpoint, optionally zlib-compressing the body. The
// request URL embeds the API key, so the key is redacted from any
// request-creation or transport error before it is returned.
func (d *Datadog) Write(metrics []telegraf.Metric) error {
	ts := TimeSeries{}
	tempSeries := d.convertToDatadogMetric(metrics)
	if len(tempSeries) == 0 {
		// Nothing sendable (e.g. all fields were strings, NaN or Inf).
		return nil
	}

	redactedAPIKey := "****************"
	ts.Series = make([]*Metric, len(tempSeries))
	copy(ts.Series, tempSeries)
	tsBytes, err := json.Marshal(ts)
	if err != nil {
		return fmt.Errorf("unable to marshal TimeSeries: %w", err)
	}

	// Optionally compress the payload; "none" (or any unrecognized value)
	// sends the raw JSON.
	body := tsBytes
	contentEncoding := ""
	if strings.ToLower(d.Compression) == "zlib" {
		encoder, err := internal.NewContentEncoder("zlib")
		if err != nil {
			return err
		}
		body, err = encoder.Encode(tsBytes)
		if err != nil {
			return err
		}
		// zlib payloads are declared as "deflate" per the Datadog API.
		contentEncoding = "deflate"
	}

	// Build the request on a single path so request-creation errors are
	// redacted regardless of compression (previously the zlib branch
	// returned the raw error, which could leak the key from the URL).
	req, err := http.NewRequest("POST", d.authenticatedURL(), bytes.NewBuffer(body))
	if err != nil {
		return fmt.Errorf("unable to create http.Request, %s", strings.ReplaceAll(err.Error(), d.Apikey, redactedAPIKey))
	}
	if contentEncoding != "" {
		req.Header.Set("Content-Encoding", contentEncoding)
	}
	req.Header.Add("Content-Type", "application/json")

	resp, err := d.client.Do(req)
	if err != nil {
		return fmt.Errorf("error POSTing metrics, %s", strings.ReplaceAll(err.Error(), d.Apikey, redactedAPIKey))
	}
	defer resp.Body.Close()

	if resp.StatusCode < 200 || resp.StatusCode > 209 {
		//nolint:errcheck // err can be ignored since it is just for logging
		respBody, _ := io.ReadAll(resp.Body)
		return fmt.Errorf("received bad status code, %d: %s", resp.StatusCode, string(respBody))
	}

	return nil
}
// authenticatedURL appends the API key to the configured endpoint as the
// api_key query parameter. The result embeds the secret, so it must never be
// logged verbatim.
func (d *Datadog) authenticatedURL() string {
	params := url.Values{}
	params.Set("api_key", d.Apikey)
	return d.URL + "?" + params.Encode()
}
// buildMetrics converts the fields of a Telegraf metric into Datadog points
// keyed by field name. Fields that cannot be represented in the JSON payload
// (strings, NaN, Inf) are silently dropped; an unsupported field type yields
// an error.
func buildMetrics(m telegraf.Metric) (map[string]Point, error) {
	points := make(map[string]Point)
	timestamp := float64(m.Time().Unix())
	for _, f := range m.FieldList() {
		if !verifyValue(f.Value) {
			continue
		}
		var pt Point
		if err := pt.setValue(f.Value); err != nil {
			return points, fmt.Errorf("unable to extract value from Field %v: %w", f.Key, err)
		}
		pt[0] = timestamp
		points[f.Key] = pt
	}
	return points, nil
}
// buildTags renders a Telegraf tag list into Datadog "key:value" strings,
// preserving order.
func buildTags(tagList []*telegraf.Tag) []string {
	result := make([]string, 0, len(tagList))
	for _, t := range tagList {
		result = append(result, t.Key+":"+t.Value)
	}
	return result
}
func verifyValue(v interface{}) bool {
switch v := v.(type) {
case string:
return false
case float64:
// The payload will be encoded as JSON, which does not allow NaN or Inf.
return !math.IsNaN(v) && !math.IsInf(v, 0)
}
return true
}
// isRateable reports whether a statsd-sourced value may be converted to a
// Datadog rate: every field of a counter metric, and only the "count" field
// of timing and histogram metrics.
func isRateable(statsDMetricType, fieldName string) bool {
	if statsDMetricType == "counter" {
		return true
	}
	if statsDMetricType == "timing" || statsDMetricType == "histogram" {
		return fieldName == "count"
	}
	return false
}
// setValue stores v as the point's value (index 1), converting the supported
// numeric and boolean types to float64. Unsupported types yield an error.
func (p *Point) setValue(v interface{}) error {
	switch val := v.(type) {
	case int64:
		p[1] = float64(val)
	case uint64:
		p[1] = float64(val)
	case float64:
		p[1] = val
	case bool:
		// Booleans are encoded as 1 (true) or 0 (false).
		if val {
			p[1] = 1
		} else {
			p[1] = 0
		}
	default:
		return fmt.Errorf("undeterminable field type: %T", v)
	}
	return nil
}
// Close is a no-op; the shared http.Client requires no explicit teardown.
func (*Datadog) Close() error {
	return nil
}
// init registers the plugin with Telegraf, defaulting to the public v1
// series endpoint and no compression.
func init() {
	outputs.Add("datadog", func() telegraf.Output {
		return &Datadog{
			URL:         datadogAPI,
			Compression: "none",
		}
	})
}

View file

@ -0,0 +1,901 @@
package datadog
import (
"encoding/json"
"fmt"
"math"
"net/http"
"net/http/httptest"
"reflect"
"testing"
"time"
"github.com/stretchr/testify/require"
"github.com/influxdata/telegraf"
"github.com/influxdata/telegraf/config"
"github.com/influxdata/telegraf/testutil"
)
// Shared fixtures for tests that do not need a live HTTP server.
var (
	fakeURL    = "http://test.datadog.com"
	fakeAPIKey = "123456"
)
// NewDatadog returns a plugin instance pointed at url, using the test
// logger and with no API key set.
func NewDatadog(url string) *Datadog {
	return &Datadog{
		URL: url,
		Log: testutil.Logger{},
	}
}
// fakeDatadog returns a plugin instance using the shared fake URL and key.
func fakeDatadog() *Datadog {
	d := NewDatadog(fakeURL)
	d.Apikey = fakeAPIKey
	return d
}
// TestUriOverride ensures a custom endpoint URL is honored and a 200
// response is treated as success.
func TestUriOverride(t *testing.T) {
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) {
		w.WriteHeader(http.StatusOK)
		json.NewEncoder(w).Encode(`{"status":"ok"}`) //nolint:errcheck // Ignore the returned error as the test will fail anyway
	}))
	defer ts.Close()
	d := NewDatadog(ts.URL)
	d.Apikey = "123456"
	err := d.Connect()
	require.NoError(t, err)
	err = d.Write(testutil.MockMetrics())
	require.NoError(t, err)
}
// TestCompressionOverride ensures that enabling zlib compression still
// produces a request the server accepts.
func TestCompressionOverride(t *testing.T) {
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) {
		w.WriteHeader(http.StatusOK)
		json.NewEncoder(w).Encode(`{"status":"ok"}`) //nolint:errcheck // Ignore the returned error as the test will fail anyway
	}))
	defer ts.Close()
	d := NewDatadog(ts.URL)
	d.Apikey = "123456"
	d.Compression = "zlib"
	err := d.Connect()
	require.NoError(t, err)
	err = d.Write(testutil.MockMetrics())
	require.NoError(t, err)
}
// TestBadStatusCode ensures a non-2xx response surfaces the status code and
// response body in the returned error.
func TestBadStatusCode(t *testing.T) {
	errorString := `{"errors": ["Something bad happened to the server.", "Your query made the server very sad."]}`
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) {
		w.WriteHeader(http.StatusInternalServerError)
		fmt.Fprint(w, errorString)
	}))
	defer ts.Close()

	d := NewDatadog(ts.URL)
	d.Apikey = "123456"
	require.NoError(t, d.Connect())

	// require.EqualError fails on a nil error, replacing the previous manual
	// if/else around t.Errorf with the testify style used elsewhere here.
	err := d.Write(testutil.MockMetrics())
	require.EqualError(t, err, fmt.Sprintf("received bad status code, %v: %s", http.StatusInternalServerError, errorString))
}
// TestAuthenticatedUrl checks that the API key is appended as the api_key
// query parameter.
func TestAuthenticatedUrl(t *testing.T) {
	d := fakeDatadog()
	authURL := d.authenticatedURL()
	require.EqualValues(t, fmt.Sprintf("%s?api_key=%s", fakeURL, fakeAPIKey), authURL)
}
// TestBuildTags verifies tag lists are rendered as "key:value" strings and
// that an empty tag list yields an empty (non-nil) slice.
func TestBuildTags(t *testing.T) {
	var tagtests = []struct {
		ptIn    []*telegraf.Tag
		outTags []string
	}{
		{
			ptIn: []*telegraf.Tag{
				{
					Key:   "one",
					Value: "two",
				},
				{
					Key:   "three",
					Value: "four",
				},
			},
			outTags: []string{"one:two", "three:four"},
		},
		{
			ptIn: []*telegraf.Tag{
				{
					Key:   "aaa",
					Value: "bbb",
				},
			},
			outTags: []string{"aaa:bbb"},
		},
		{
			// Empty input must produce an empty, non-nil slice.
			ptIn:    make([]*telegraf.Tag, 0),
			outTags: make([]string, 0),
		},
	}
	for _, tt := range tagtests {
		tags := buildTags(tt.ptIn)
		if !reflect.DeepEqual(tags, tt.outTags) {
			t.Errorf("\nexpected %+v\ngot %+v\n", tt.outTags, tags)
		}
	}
}
// TestBuildPoint verifies that buildMetrics converts every supported field
// type into a Point carrying the metric timestamp and a float64 value.
func TestBuildPoint(t *testing.T) {
	// All testutil metrics share this fixed timestamp; hoisting it removes
	// eleven copies of the same time.Date expression.
	ts := float64(time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC).Unix())
	var tagtests = []struct {
		ptIn  telegraf.Metric
		outPt Point
		err   error
	}{
		{testutil.TestMetric(0.0, "test1"), Point{ts, 0.0}, nil},
		{testutil.TestMetric(1.0, "test2"), Point{ts, 1.0}, nil},
		{testutil.TestMetric(10, "test3"), Point{ts, 10.0}, nil},
		{testutil.TestMetric(int32(112345), "test4"), Point{ts, 112345.0}, nil},
		{testutil.TestMetric(int64(112345), "test5"), Point{ts, 112345.0}, nil},
		{testutil.TestMetric(float32(11234.5), "test6"), Point{ts, 11234.5}, nil},
		{testutil.TestMetric(true, "test7"), Point{ts, 1.0}, nil},
		{testutil.TestMetric(false, "test8"), Point{ts, 0.0}, nil},
		{testutil.TestMetric(int64(0), "test int64"), Point{ts, 0.0}, nil},
		{testutil.TestMetric(uint64(0), "test uint64"), Point{ts, 0.0}, nil},
		{testutil.TestMetric(true, "test bool"), Point{ts, 1.0}, nil},
	}
	for _, tt := range tagtests {
		pt, err := buildMetrics(tt.ptIn)
		if tt.err != nil {
			require.EqualError(t, err, tt.err.Error(), tt.ptIn.Name())
			continue
		}
		require.NoError(t, err, tt.ptIn.Name())
		require.Equal(t, tt.outPt, pt["value"], tt.ptIn.Name())
	}
}
// TestVerifyValue checks that numeric field values are accepted and string
// field values are rejected.
func TestVerifyValue(t *testing.T) {
	var tagtests = []struct {
		ptIn        telegraf.Metric
		validMetric bool
	}{
		{
			testutil.TestMetric(float32(11234.5), "test1"),
			true,
		},
		{
			testutil.TestMetric("11234.5", "test2"),
			false,
		},
	}
	for _, tt := range tagtests {
		ok := verifyValue(tt.ptIn.Fields()["value"])
		if tt.validMetric != ok {
			t.Errorf("%s: verification failed\n", tt.ptIn.Name())
		}
	}
}
// TestNaNIsSkipped ensures a metric whose only field is NaN is dropped
// without attempting an HTTP request.
func TestNaNIsSkipped(t *testing.T) {
	plugin := &Datadog{
		Apikey: "testing",
		URL:    "", // No request will be sent because all fields are skipped
	}
	err := plugin.Connect()
	require.NoError(t, err)
	err = plugin.Write([]telegraf.Metric{
		testutil.MustMetric(
			"cpu",
			map[string]string{},
			map[string]interface{}{
				"time_idle": math.NaN(),
			},
			time.Now()),
	})
	require.NoError(t, err)
}
// TestInfIsSkipped ensures a metric whose only field is Inf is dropped
// without attempting an HTTP request.
func TestInfIsSkipped(t *testing.T) {
	plugin := &Datadog{
		Apikey: "testing",
		URL:    "", // No request will be sent because all fields are skipped
	}
	err := plugin.Connect()
	require.NoError(t, err)
	err = plugin.Write([]telegraf.Metric{
		testutil.MustMetric(
			"cpu",
			map[string]string{},
			map[string]interface{}{
				"time_idle": math.Inf(0),
			},
			time.Now()),
	})
	require.NoError(t, err)
}
func TestNonZeroRateIntervalConvertsRatesToCount(t *testing.T) {
d := &Datadog{
Apikey: "123456",
RateInterval: config.Duration(10 * time.Second),
}
var tests = []struct {
name string
metricsIn []telegraf.Metric
metricsOut []*Metric
}{
{
"convert counter metrics to rate",
[]telegraf.Metric{
testutil.MustMetric(
"count_metric",
map[string]string{
"metric_type": "counter",
},
map[string]interface{}{
"value": 100,
},
time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC),
telegraf.Counter,
),
},
[]*Metric{
{
Metric: "count_metric",
Points: [1]Point{
{
float64(time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC).Unix()),
10,
},
},
Type: "rate",
Tags: []string{
"metric_type:counter",
},
Interval: 10,
},
},
},
{
"convert count value in timing metrics to rate",
[]telegraf.Metric{
testutil.MustMetric(
"timing_metric",
map[string]string{
"metric_type": "timing",
},
map[string]interface{}{
"count": 1,
"lower": float64(10),
"mean": float64(10),
"median": float64(10),
"stddev": float64(0),
"sum": float64(10),
"upper": float64(10),
},
time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC),
telegraf.Untyped,
),
},
[]*Metric{
{
Metric: "timing_metric.count",
Points: [1]Point{
{
float64(time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC).Unix()),
0.1,
},
},
Type: "rate",
Tags: []string{
"metric_type:timing",
},
Interval: 10,
},
{
Metric: "timing_metric.lower",
Points: [1]Point{
{
float64(time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC).Unix()),
float64(10),
},
},
Type: "",
Tags: []string{
"metric_type:timing",
},
Interval: 1,
},
{
Metric: "timing_metric.mean",
Points: [1]Point{
{
float64(time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC).Unix()),
float64(10),
},
},
Type: "",
Tags: []string{
"metric_type:timing",
},
Interval: 1,
},
{
Metric: "timing_metric.median",
Points: [1]Point{
{
float64(time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC).Unix()),
float64(10),
},
},
Type: "",
Tags: []string{
"metric_type:timing",
},
Interval: 1,
},
{
Metric: "timing_metric.stddev",
Points: [1]Point{
{
float64(time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC).Unix()),
float64(0),
},
},
Type: "",
Tags: []string{
"metric_type:timing",
},
Interval: 1,
},
{
Metric: "timing_metric.sum",
Points: [1]Point{
{
float64(time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC).Unix()),
float64(10),
},
},
Type: "",
Tags: []string{
"metric_type:timing",
},
Interval: 1,
},
{
Metric: "timing_metric.upper",
Points: [1]Point{
{
float64(time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC).Unix()),
float64(10),
},
},
Type: "",
Tags: []string{
"metric_type:timing",
},
Interval: 1,
},
},
},
{
"convert count value in histogram metrics to rate",
[]telegraf.Metric{
testutil.MustMetric(
"histogram_metric",
map[string]string{
"metric_type": "histogram",
},
map[string]interface{}{
"count": 1,
"lower": float64(10),
"mean": float64(10),
"median": float64(10),
"stddev": float64(0),
"sum": float64(10),
"upper": float64(10),
},
time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC),
telegraf.Untyped,
),
},
[]*Metric{
{
Metric: "histogram_metric.count",
Points: [1]Point{
{
float64(time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC).Unix()),
0.1,
},
},
Type: "rate",
Tags: []string{
"metric_type:histogram",
},
Interval: 10,
},
{
Metric: "histogram_metric.lower",
Points: [1]Point{
{
float64(time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC).Unix()),
float64(10),
},
},
Type: "",
Tags: []string{
"metric_type:histogram",
},
Interval: 1,
},
{
Metric: "histogram_metric.mean",
Points: [1]Point{
{
float64(time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC).Unix()),
float64(10),
},
},
Type: "",
Tags: []string{
"metric_type:histogram",
},
Interval: 1,
},
{
Metric: "histogram_metric.median",
Points: [1]Point{
{
float64(time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC).Unix()),
float64(10),
},
},
Type: "",
Tags: []string{
"metric_type:histogram",
},
Interval: 1,
},
{
Metric: "histogram_metric.stddev",
Points: [1]Point{
{
float64(time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC).Unix()),
float64(0),
},
},
Type: "",
Tags: []string{
"metric_type:histogram",
},
Interval: 1,
},
{
Metric: "histogram_metric.sum",
Points: [1]Point{
{
float64(time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC).Unix()),
float64(10),
},
},
Type: "",
Tags: []string{
"metric_type:histogram",
},
Interval: 1,
},
{
Metric: "histogram_metric.upper",
Points: [1]Point{
{
float64(time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC).Unix()),
float64(10),
},
},
Type: "",
Tags: []string{
"metric_type:histogram",
},
Interval: 1,
},
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
actualMetricsOut := d.convertToDatadogMetric(tt.metricsIn)
require.ElementsMatch(t, tt.metricsOut, actualMetricsOut)
})
}
}
func TestZeroRateIntervalConvertsRatesToCount(t *testing.T) {
d := &Datadog{
Apikey: "123456",
}
var tests = []struct {
name string
metricsIn []telegraf.Metric
metricsOut []*Metric
}{
{
"does not convert counter metrics to rate",
[]telegraf.Metric{
testutil.MustMetric(
"count_metric",
map[string]string{
"metric_type": "counter",
},
map[string]interface{}{
"value": 100,
},
time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC),
telegraf.Counter,
),
},
[]*Metric{
{
Metric: "count_metric",
Points: [1]Point{
{
float64(time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC).Unix()),
100,
},
},
Type: "count",
Tags: []string{
"metric_type:counter",
},
Interval: 1,
},
},
},
{
"does not convert count value in timing metrics to rate",
[]telegraf.Metric{
testutil.MustMetric(
"timing_metric",
map[string]string{
"metric_type": "timing",
},
map[string]interface{}{
"count": 1,
"lower": float64(10),
"mean": float64(10),
"median": float64(10),
"stddev": float64(0),
"sum": float64(10),
"upper": float64(10),
},
time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC),
telegraf.Untyped,
),
},
[]*Metric{
{
Metric: "timing_metric.count",
Points: [1]Point{
{
float64(time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC).Unix()),
1,
},
},
Type: "",
Tags: []string{
"metric_type:timing",
},
Interval: 1,
},
{
Metric: "timing_metric.lower",
Points: [1]Point{
{
float64(time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC).Unix()),
float64(10),
},
},
Type: "",
Tags: []string{
"metric_type:timing",
},
Interval: 1,
},
{
Metric: "timing_metric.mean",
Points: [1]Point{
{
float64(time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC).Unix()),
float64(10),
},
},
Type: "",
Tags: []string{
"metric_type:timing",
},
Interval: 1,
},
{
Metric: "timing_metric.median",
Points: [1]Point{
{
float64(time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC).Unix()),
float64(10),
},
},
Type: "",
Tags: []string{
"metric_type:timing",
},
Interval: 1,
},
{
Metric: "timing_metric.stddev",
Points: [1]Point{
{
float64(time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC).Unix()),
float64(0),
},
},
Type: "",
Tags: []string{
"metric_type:timing",
},
Interval: 1,
},
{
Metric: "timing_metric.sum",
Points: [1]Point{
{
float64(time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC).Unix()),
float64(10),
},
},
Type: "",
Tags: []string{
"metric_type:timing",
},
Interval: 1,
},
{
Metric: "timing_metric.upper",
Points: [1]Point{
{
float64(time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC).Unix()),
float64(10),
},
},
Type: "",
Tags: []string{
"metric_type:timing",
},
Interval: 1,
},
},
},
{
"does not convert count value in histogram metrics to rate",
[]telegraf.Metric{
testutil.MustMetric(
"histogram_metric",
map[string]string{
"metric_type": "histogram",
},
map[string]interface{}{
"count": 1,
"lower": float64(10),
"mean": float64(10),
"median": float64(10),
"stddev": float64(0),
"sum": float64(10),
"upper": float64(10),
},
time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC),
telegraf.Untyped,
),
},
[]*Metric{
{
Metric: "histogram_metric.count",
Points: [1]Point{
{
float64(time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC).Unix()),
1,
},
},
Type: "",
Tags: []string{
"metric_type:histogram",
},
Interval: 1,
},
{
Metric: "histogram_metric.lower",
Points: [1]Point{
{
float64(time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC).Unix()),
float64(10),
},
},
Type: "",
Tags: []string{
"metric_type:histogram",
},
Interval: 1,
},
{
Metric: "histogram_metric.mean",
Points: [1]Point{
{
float64(time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC).Unix()),
float64(10),
},
},
Type: "",
Tags: []string{
"metric_type:histogram",
},
Interval: 1,
},
{
Metric: "histogram_metric.median",
Points: [1]Point{
{
float64(time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC).Unix()),
float64(10),
},
},
Type: "",
Tags: []string{
"metric_type:histogram",
},
Interval: 1,
},
{
Metric: "histogram_metric.stddev",
Points: [1]Point{
{
float64(time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC).Unix()),
float64(0),
},
},
Type: "",
Tags: []string{
"metric_type:histogram",
},
Interval: 1,
},
{
Metric: "histogram_metric.sum",
Points: [1]Point{
{
float64(time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC).Unix()),
float64(10),
},
},
Type: "",
Tags: []string{
"metric_type:histogram",
},
Interval: 1,
},
{
Metric: "histogram_metric.upper",
Points: [1]Point{
{
float64(time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC).Unix()),
float64(10),
},
},
Type: "",
Tags: []string{
"metric_type:histogram",
},
Interval: 1,
},
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
actualMetricsOut := d.convertToDatadogMetric(tt.metricsIn)
require.ElementsMatch(t, tt.metricsOut, actualMetricsOut)
})
}
}

View file

@ -0,0 +1,27 @@
# Configuration for DataDog API to send metrics to.
[[outputs.datadog]]
## Datadog API key
apikey = "my-secret-key"
## Connection timeout.
# timeout = "5s"
## Write URL override; useful for debugging.
## This plugin only supports the v1 API currently due to the authentication
## method used.
# url = "https://app.datadoghq.com/api/v1/series"
## Set http_proxy
# use_system_proxy = false
# http_proxy_url = "http://localhost:8888"
## Override the default (none) compression used to send data.
## Supports: "zlib", "none"
# compression = "none"
## When non-zero, converts count metrics submitted by inputs.statsd
## into rate, while dividing the metric value by this number.
## Note that in order for metrics to be submitted simultaneously alongside
## a Datadog agent, rate_interval has to match the interval used by the
## agent - which defaults to 10s
# rate_interval = 0s