
Adding upstream version 1.34.4.

Signed-off-by: Daniel Baumann <daniel@debian.org>
Daniel Baumann 2025-05-24 07:26:29 +02:00
parent e393c3af3f
commit 4978089aab
Signed by: daniel
GPG key ID: FBB4F0E80A80222F
4963 changed files with 677545 additions and 0 deletions


@@ -0,0 +1,82 @@
# Google Cloud PubSub Output Plugin
This plugin publishes metrics to a [Google Cloud PubSub][pubsub] topic in one
of the supported [data formats][data_formats].
⭐ Telegraf v1.10.0
🏷️ cloud, messaging
💻 all
## Global configuration options <!-- @/docs/includes/plugin_config.md -->
In addition to the plugin-specific configuration settings, plugins support
additional global and plugin configuration settings. These settings are used to
modify metrics, tags, and fields, create aliases, and configure ordering, etc.
See the [CONFIGURATION.md][CONFIGURATION.md] for more details.
[CONFIGURATION.md]: ../../../docs/CONFIGURATION.md#plugins
## Configuration
```toml @sample.conf
# Publish Telegraf metrics to a Google Cloud PubSub topic
[[outputs.cloud_pubsub]]
## Required. Name of Google Cloud Platform (GCP) Project that owns
## the given PubSub topic.
project = "my-project"
## Required. Name of PubSub topic to publish metrics to.
topic = "my-topic"
## Content encoding for message payloads. Can be set to "gzip" or
## to "identity" to apply no encoding.
# content_encoding = "identity"
## Required. Data format to output.
## Each data format has its own unique set of configuration options.
## Read more about them here:
## https://github.com/influxdata/telegraf/blob/master/docs/DATA_FORMATS_OUTPUT.md
data_format = "influx"
## Optional. Filepath for GCP credentials JSON file to authorize calls to
## PubSub APIs. If not set explicitly, Telegraf will attempt to use
## Application Default Credentials, which is preferred.
# credentials_file = "path/to/my/creds.json"
## Optional. If true, all metrics from a single write are sent in one PubSub message.
# send_batched = true
## The following publish_* parameters specifically configure batching of
## requests made to the Google Cloud PubSub API via the PubSub Go client library.
## Read more here: https://godoc.org/cloud.google.com/go/pubsub#PublishSettings
## Optional. Send a request to PubSub (i.e. actually publish a batch)
## when it has this many PubSub messages. If send_batched is true,
## this is ignored and treated as if it were 1.
# publish_count_threshold = 1000
## Optional. Send a request to PubSub (i.e. actually publish a batch)
## when the outstanding message data reaches this many bytes.
# publish_byte_threshold = 1000000
## Optional. Number of goroutines the PubSub client uses to publish messages.
# publish_num_go_routines = 2
## Optional. Specifies a timeout for requests to the PubSub API.
# publish_timeout = "30s"
## Optional. If true, published PubSub message data will be base64-encoded.
# base64_data = false
## NOTE: Due to the way TOML is parsed, tables must be at the END of the
## plugin definition, otherwise additional config options are read as part
## of the table.
## Optional. PubSub attributes to add to metrics.
# [outputs.cloud_pubsub.attributes]
# my_attr = "tag_value"
```
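
When `base64_data` and a `content_encoding` of `"gzip"` are both enabled, the
plugin serializes the metrics, base64-encodes the result and then gzips it, so
a subscriber has to reverse those steps in the opposite order. The following is
a minimal, stdlib-only sketch of that decoding (the sample line-protocol string
and the helper name `decodePayload` are illustrative, not part of the plugin):

```go
package main

import (
	"bytes"
	"compress/gzip"
	"encoding/base64"
	"fmt"
	"io"
	"log"
)

// decodePayload reverses the transformations applied for payloads produced
// with content_encoding = "gzip" and base64_data = true: gunzip first, then
// base64-decode, leaving the serialized metrics (e.g. Influx line protocol).
func decodePayload(data []byte) (string, error) {
	zr, err := gzip.NewReader(bytes.NewReader(data))
	if err != nil {
		return "", fmt.Errorf("gzip: %w", err)
	}
	defer zr.Close()

	b64, err := io.ReadAll(zr)
	if err != nil {
		return "", fmt.Errorf("read: %w", err)
	}

	raw, err := base64.StdEncoding.DecodeString(string(b64))
	if err != nil {
		return "", fmt.Errorf("base64: %w", err)
	}
	return string(raw), nil
}

func main() {
	// Build a sample payload the same way the plugin would.
	line := "cpu,host=example usage_idle=99 1556813561098000000"
	encoded := base64.StdEncoding.EncodeToString([]byte(line))

	var buf bytes.Buffer
	zw := gzip.NewWriter(&buf)
	if _, err := zw.Write([]byte(encoded)); err != nil {
		log.Fatal(err)
	}
	if err := zw.Close(); err != nil {
		log.Fatal(err)
	}

	decoded, err := decodePayload(buf.Bytes())
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(decoded) // prints the original line-protocol string
}
```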
[pubsub]: https://cloud.google.com/pubsub
[data_formats]: /docs/DATA_FORMATS_OUTPUT.md


@@ -0,0 +1,288 @@
//go:generate ../../../tools/readme_config_includer/generator
package cloud_pubsub
import (
"context"
_ "embed"
"encoding/base64"
"errors"
"fmt"
"sync"
"time"
"cloud.google.com/go/pubsub"
"golang.org/x/oauth2/google"
"google.golang.org/api/option"
"github.com/influxdata/telegraf"
"github.com/influxdata/telegraf/config"
"github.com/influxdata/telegraf/internal"
"github.com/influxdata/telegraf/plugins/outputs"
)
//go:embed sample.conf
var sampleConfig string
type PubSub struct {
CredentialsFile string `toml:"credentials_file"`
Project string `toml:"project"`
Topic string `toml:"topic"`
Attributes map[string]string `toml:"attributes"`
SendBatched bool `toml:"send_batched"`
PublishCountThreshold int `toml:"publish_count_threshold"`
PublishByteThreshold int `toml:"publish_byte_threshold"`
PublishNumGoroutines int `toml:"publish_num_go_routines"`
PublishTimeout config.Duration `toml:"publish_timeout"`
Base64Data bool `toml:"base64_data"`
ContentEncoding string `toml:"content_encoding"`
Log telegraf.Logger `toml:"-"`
t topic
c *pubsub.Client
stubTopic func(id string) topic
serializer telegraf.Serializer
publishResults []publishResult
encoder internal.ContentEncoder
}
func (*PubSub) SampleConfig() string {
return sampleConfig
}
func (ps *PubSub) SetSerializer(serializer telegraf.Serializer) {
ps.serializer = serializer
}
func (ps *PubSub) Connect() error {
if ps.stubTopic == nil {
return ps.initPubSubClient()
}
return nil
}
func (ps *PubSub) Close() error {
if ps.t != nil {
ps.t.Stop()
}
return nil
}
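// Write serializes the given metrics, publishes them to the configured topic and
// blocks until every message has been acknowledged or a publish error occurs.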
func (ps *PubSub) Write(metrics []telegraf.Metric) error {
ps.refreshTopic()
// Serialize metrics and package into appropriate PubSub messages
msgs, err := ps.toMessages(metrics)
if err != nil {
return err
}
cctx, cancel := context.WithCancel(context.Background())
// Publish all messages - each call to Publish returns a future.
ps.publishResults = make([]publishResult, 0, len(msgs))
for _, m := range msgs {
ps.publishResults = append(ps.publishResults, ps.t.Publish(cctx, m))
}
// topic.Stop() forces all published messages to be sent, even
// if PubSub batch limits have not been reached.
go ps.t.Stop()
return ps.waitForResults(cctx, cancel)
}
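// initPubSubClient creates the PubSub client, authenticating with the configured
// credentials file or falling back to Application Default Credentials.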
func (ps *PubSub) initPubSubClient() error {
var credsOpt option.ClientOption
if ps.CredentialsFile != "" {
credsOpt = option.WithCredentialsFile(ps.CredentialsFile)
} else {
creds, err := google.FindDefaultCredentials(context.Background(), pubsub.ScopeCloudPlatform)
if err != nil {
return fmt.Errorf(
"unable to find GCP Application Default Credentials: %w; "+
"either set ADC or provide the credentials_file config option", err)
}
credsOpt = option.WithCredentials(creds)
}
client, err := pubsub.NewClient(
context.Background(),
ps.Project,
credsOpt,
option.WithScopes(pubsub.ScopeCloudPlatform),
option.WithUserAgent(internal.ProductToken()),
)
if err != nil {
return fmt.Errorf("unable to generate PubSub client: %w", err)
}
ps.c = client
return nil
}
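// refreshTopic creates a fresh topic handle for every write, since Write stops
// the previous handle to flush any messages still buffered by the client.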
func (ps *PubSub) refreshTopic() {
if ps.stubTopic != nil {
ps.t = ps.stubTopic(ps.Topic)
} else {
t := ps.c.Topic(ps.Topic)
ps.t = &topicWrapper{t}
}
ps.t.SetPublishSettings(ps.publishSettings())
}
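// publishSettings maps the plugin configuration onto the PubSub client's
// PublishSettings; send_batched pins the count threshold to 1 so the single
// batched message is published immediately.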
func (ps *PubSub) publishSettings() pubsub.PublishSettings {
settings := pubsub.PublishSettings{}
if ps.PublishNumGoroutines > 0 {
settings.NumGoroutines = ps.PublishNumGoroutines
}
if time.Duration(ps.PublishTimeout) > 0 {
settings.Timeout = time.Duration(ps.PublishTimeout)
}
if ps.SendBatched {
settings.CountThreshold = 1
} else if ps.PublishCountThreshold > 0 {
settings.CountThreshold = ps.PublishCountThreshold
}
if ps.PublishByteThreshold > 0 {
settings.ByteThreshold = ps.PublishByteThreshold
}
return settings
}
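// toMessages serializes metrics into PubSub messages: one batched message when
// send_batched is set, otherwise one message per metric. Optional base64 encoding
// is applied before the configured content encoding.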
func (ps *PubSub) toMessages(metrics []telegraf.Metric) ([]*pubsub.Message, error) {
if ps.SendBatched {
b, err := ps.serializer.SerializeBatch(metrics)
if err != nil {
return nil, err
}
b = ps.encodeB64Data(b)
b, err = ps.compressData(b)
if err != nil {
return nil, fmt.Errorf("unable to compress message with %s: %w", ps.ContentEncoding, err)
}
msg := &pubsub.Message{Data: b}
if ps.Attributes != nil {
msg.Attributes = ps.Attributes
}
return []*pubsub.Message{msg}, nil
}
msgs := make([]*pubsub.Message, 0, len(metrics))
for _, m := range metrics {
b, err := ps.serializer.Serialize(m)
if err != nil {
ps.Log.Debugf("Could not serialize metric: %v", err)
continue
}
b = ps.encodeB64Data(b)
b, err = ps.compressData(b)
if err != nil {
ps.Log.Errorf("Unable to compress message with %s: %v", ps.ContentEncoding, err)
continue
}
msg := &pubsub.Message{
Data: b,
}
if ps.Attributes != nil {
msg.Attributes = ps.Attributes
}
msgs = append(msgs, msg)
}
return msgs, nil
}
func (ps *PubSub) encodeB64Data(data []byte) []byte {
if ps.Base64Data {
encoded := base64.StdEncoding.EncodeToString(data)
data = []byte(encoded)
}
return data
}
func (ps *PubSub) compressData(data []byte) ([]byte, error) {
if ps.ContentEncoding == "identity" {
return data, nil
}
data, err := ps.encoder.Encode(data)
if err != nil {
return nil, err
}
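// Copy the result: the content encoder may reuse its internal buffer on later
// calls, which would corrupt messages that are still waiting to be published.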
compressedData := make([]byte, len(data))
copy(compressedData, data)
data = compressedData
return data, nil
}
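// waitForResults blocks until every publish future resolves, recording the first
// error encountered and cancelling the remaining publishes.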
func (ps *PubSub) waitForResults(ctx context.Context, cancel context.CancelFunc) error {
var pErr error
var setErr sync.Once
var wg sync.WaitGroup
for _, pr := range ps.publishResults {
wg.Add(1)
go func(r publishResult) {
defer wg.Done()
// Wait on each future
_, err := r.Get(ctx)
if err != nil {
setErr.Do(func() {
pErr = err
cancel()
})
}
}(pr)
}
wg.Wait()
return pErr
}
func (ps *PubSub) Init() error {
if ps.Topic == "" {
return errors.New(`"topic" is required`)
}
if ps.Project == "" {
return errors.New(`"project" is required`)
}
switch ps.ContentEncoding {
case "", "identity":
ps.ContentEncoding = "identity"
case "gzip":
var err error
ps.encoder, err = internal.NewContentEncoder(ps.ContentEncoding)
if err != nil {
return err
}
default:
return fmt.Errorf("invalid value %q for content_encoding", ps.ContentEncoding)
}
return nil
}
func init() {
outputs.Add("cloud_pubsub", func() telegraf.Output {
return &PubSub{}
})
}


@@ -0,0 +1,240 @@
package cloud_pubsub
import (
"encoding/base64"
"testing"
"cloud.google.com/go/pubsub"
"github.com/stretchr/testify/require"
"github.com/influxdata/telegraf"
"github.com/influxdata/telegraf/internal"
"github.com/influxdata/telegraf/plugins/parsers/influx"
"github.com/influxdata/telegraf/testutil"
)
func TestPubSub_WriteSingle(t *testing.T) {
testMetrics := []testMetric{
{testutil.TestMetric("value_1", "test"), false},
}
settings := pubsub.DefaultPublishSettings
settings.CountThreshold = 1
ps, topic, metrics := getTestResources(t, settings, testMetrics)
require.NoError(t, ps.Write(metrics))
for _, testM := range testMetrics {
verifyRawMetricPublished(t, testM.m, topic.published)
}
}
func TestPubSub_WriteWithAttribute(t *testing.T) {
testMetrics := []testMetric{
{testutil.TestMetric("value_1", "test"), false},
}
settings := pubsub.DefaultPublishSettings
ps, topic, metrics := getTestResources(t, settings, testMetrics)
ps.Attributes = map[string]string{
"foo1": "bar1",
"foo2": "bar2",
}
require.NoError(t, ps.Write(metrics))
for _, testM := range testMetrics {
msg := verifyRawMetricPublished(t, testM.m, topic.published)
require.Equalf(t, "bar1", msg.Attributes["foo1"], "expected attribute foo1=bar1")
require.Equalf(t, "bar2", msg.Attributes["foo2"], "expected attribute foo2=bar2")
}
}
func TestPubSub_WriteMultiple(t *testing.T) {
testMetrics := []testMetric{
{testutil.TestMetric("value_1", "test"), false},
{testutil.TestMetric("value_2", "test"), false},
}
settings := pubsub.DefaultPublishSettings
ps, topic, metrics := getTestResources(t, settings, testMetrics)
require.NoError(t, ps.Write(metrics))
for _, testM := range testMetrics {
verifyRawMetricPublished(t, testM.m, topic.published)
}
require.Equalf(t, 1, topic.getBundleCount(), "unexpected bundle count")
}
func TestPubSub_WriteOverCountThreshold(t *testing.T) {
testMetrics := []testMetric{
{testutil.TestMetric("value_1", "test"), false},
{testutil.TestMetric("value_2", "test"), false},
{testutil.TestMetric("value_3", "test"), false},
{testutil.TestMetric("value_4", "test"), false},
}
settings := pubsub.DefaultPublishSettings
settings.CountThreshold = 2
ps, topic, metrics := getTestResources(t, settings, testMetrics)
require.NoError(t, ps.Write(metrics))
for _, testM := range testMetrics {
verifyRawMetricPublished(t, testM.m, topic.published)
}
require.Equalf(t, 2, topic.getBundleCount(), "unexpected bundle count")
}
func TestPubSub_WriteOverByteThreshold(t *testing.T) {
testMetrics := []testMetric{
{testutil.TestMetric("value_1", "test"), false},
{testutil.TestMetric("value_2", "test"), false},
}
settings := pubsub.DefaultPublishSettings
settings.CountThreshold = 10
settings.ByteThreshold = 1
ps, topic, metrics := getTestResources(t, settings, testMetrics)
require.NoError(t, ps.Write(metrics))
for _, testM := range testMetrics {
verifyRawMetricPublished(t, testM.m, topic.published)
}
require.Equalf(t, 2, topic.getBundleCount(), "unexpected bundle count")
}
func TestPubSub_WriteBase64Single(t *testing.T) {
testMetrics := []testMetric{
{testutil.TestMetric("value_1", "test"), false},
{testutil.TestMetric("value_2", "test"), false},
}
settings := pubsub.DefaultPublishSettings
settings.CountThreshold = 1
ps, topic, metrics := getTestResources(t, settings, testMetrics)
ps.Base64Data = true
topic.Base64Data = true
require.NoError(t, ps.Write(metrics))
for _, testM := range testMetrics {
verifyMetricPublished(t, testM.m, topic.published, true /* base64encoded */, false /* gzipEncoded */)
}
}
func TestPubSub_Error(t *testing.T) {
testMetrics := []testMetric{
// Force this batch to return error
{testutil.TestMetric("value_1", "test"), true},
{testutil.TestMetric("value_2", "test"), false},
}
settings := pubsub.DefaultPublishSettings
ps, _, metrics := getTestResources(t, settings, testMetrics)
err := ps.Write(metrics)
require.Error(t, err)
require.ErrorContains(t, err, errMockFail)
}
func TestPubSub_WriteGzipSingle(t *testing.T) {
testMetrics := []testMetric{
{testutil.TestMetric("value_1", "test"), false},
{testutil.TestMetric("value_2", "test"), false},
}
settings := pubsub.DefaultPublishSettings
settings.CountThreshold = 1
ps, topic, metrics := getTestResources(t, settings, testMetrics)
topic.ContentEncoding = "gzip"
ps.ContentEncoding = "gzip"
var err error
ps.encoder, err = internal.NewContentEncoder(ps.ContentEncoding)
require.NoError(t, err)
require.NoError(t, ps.Write(metrics))
for _, testM := range testMetrics {
verifyMetricPublished(t, testM.m, topic.published, false /* base64encoded */, true /* gzipEncoded */)
}
}
func TestPubSub_WriteGzipAndBase64Single(t *testing.T) {
testMetrics := []testMetric{
{testutil.TestMetric("value_1", "test"), false},
{testutil.TestMetric("value_2", "test"), false},
}
settings := pubsub.DefaultPublishSettings
settings.CountThreshold = 1
ps, topic, metrics := getTestResources(t, settings, testMetrics)
topic.ContentEncoding = "gzip"
topic.Base64Data = true
ps.ContentEncoding = "gzip"
ps.Base64Data = true
var err error
ps.encoder, err = internal.NewContentEncoder(ps.ContentEncoding)
require.NoError(t, err)
require.NoError(t, ps.Write(metrics))
for _, testM := range testMetrics {
verifyMetricPublished(t, testM.m, topic.published, true /* base64encoded */, true /* gzipEncoded */)
}
}
func verifyRawMetricPublished(t *testing.T, m telegraf.Metric, published map[string]*pubsub.Message) *pubsub.Message {
return verifyMetricPublished(t, m, published, false, false)
}
func verifyMetricPublished(t *testing.T, m telegraf.Metric, published map[string]*pubsub.Message, base64Encoded, gzipEncoded bool) *pubsub.Message {
p := influx.Parser{}
require.NoError(t, p.Init())
v, _ := m.GetField("value")
psMsg, ok := published[v.(string)]
if !ok {
t.Fatalf("expected metric to get published (value: %s)", v.(string))
}
data := psMsg.Data
if gzipEncoded {
decoder, err := internal.NewContentDecoder("gzip")
require.NoError(t, err)
data, err = decoder.Decode(data)
if err != nil {
t.Fatalf("Unable to decode expected gzip encoded message: %s", err)
}
}
if base64Encoded {
v, err := base64.StdEncoding.DecodeString(string(data))
if err != nil {
t.Fatalf("Unable to decode expected base64-encoded message: %s", err)
}
data = v
}
parsed, err := p.Parse(data)
if err != nil {
t.Fatalf("could not parse influxdb metric from published message: %s", string(data))
}
if len(parsed) > 1 {
t.Fatalf("expected only one influxdb metric per published message, got %d", len(published))
}
publishedV, ok := parsed[0].GetField("value")
if !ok {
t.Fatalf("expected published metric to have a value")
}
require.Equal(t, v, publishedV, "incorrect published value")
return psMsg
}


@@ -0,0 +1,57 @@
# Publish Telegraf metrics to a Google Cloud PubSub topic
[[outputs.cloud_pubsub]]
## Required. Name of Google Cloud Platform (GCP) Project that owns
## the given PubSub topic.
project = "my-project"
## Required. Name of PubSub topic to publish metrics to.
topic = "my-topic"
## Content encoding for message payloads. Can be set to "gzip" or
## to "identity" to apply no encoding.
# content_encoding = "identity"
## Required. Data format to output.
## Each data format has its own unique set of configuration options.
## Read more about them here:
## https://github.com/influxdata/telegraf/blob/master/docs/DATA_FORMATS_OUTPUT.md
data_format = "influx"
## Optional. Filepath for GCP credentials JSON file to authorize calls to
## PubSub APIs. If not set explicitly, Telegraf will attempt to use
## Application Default Credentials, which is preferred.
# credentials_file = "path/to/my/creds.json"
## Optional. If true, all metrics from a single write are sent in one PubSub message.
# send_batched = true
## The following publish_* parameters specifically configure batching of
## requests made to the Google Cloud PubSub API via the PubSub Go client library.
## Read more here: https://godoc.org/cloud.google.com/go/pubsub#PublishSettings
## Optional. Send a request to PubSub (i.e. actually publish a batch)
## when it has this many PubSub messages. If send_batched is true,
## this is ignored and treated as if it were 1.
# publish_count_threshold = 1000
## Optional. Send a request to PubSub (i.e. actually publish a batch)
## when the outstanding message data reaches this many bytes.
# publish_byte_threshold = 1000000
## Optional. Number of goroutines the PubSub client uses to publish messages.
# publish_num_go_routines = 2
## Optional. Specifies a timeout for requests to the PubSub API.
# publish_timeout = "30s"
## Optional. If true, published PubSub message data will be base64-encoded.
# base64_data = false
## NOTE: Due to the way TOML is parsed, tables must be at the END of the
## plugin definition, otherwise additional config options are read as part
## of the table.
## Optional. PubSub attributes to add to metrics.
# [outputs.cloud_pubsub.attributes]
# my_attr = "tag_value"


@@ -0,0 +1,45 @@
package cloud_pubsub
import (
"context"
"cloud.google.com/go/pubsub"
)
type (
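// topic abstracts *pubsub.Topic so tests can substitute a stub implementation.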
topic interface {
ID() string
Stop()
Publish(ctx context.Context, msg *pubsub.Message) publishResult
PublishSettings() pubsub.PublishSettings
SetPublishSettings(settings pubsub.PublishSettings)
}
publishResult interface {
Get(ctx context.Context) (string, error)
}
topicWrapper struct {
topic *pubsub.Topic
}
)
func (tw *topicWrapper) ID() string {
return tw.topic.ID()
}
func (tw *topicWrapper) Stop() {
tw.topic.Stop()
}
func (tw *topicWrapper) Publish(ctx context.Context, msg *pubsub.Message) publishResult {
return tw.topic.Publish(ctx, msg)
}
func (tw *topicWrapper) PublishSettings() pubsub.PublishSettings {
return tw.topic.PublishSettings
}
func (tw *topicWrapper) SetPublishSettings(settings pubsub.PublishSettings) {
tw.topic.PublishSettings = settings
}


@@ -0,0 +1,238 @@
package cloud_pubsub
import (
"context"
"encoding/base64"
"errors"
"runtime"
"sync"
"testing"
"time"
"cloud.google.com/go/pubsub"
"github.com/stretchr/testify/require"
"google.golang.org/api/support/bundler"
"github.com/influxdata/telegraf"
"github.com/influxdata/telegraf/config"
"github.com/influxdata/telegraf/internal"
"github.com/influxdata/telegraf/plugins/parsers/influx"
serializers_influx "github.com/influxdata/telegraf/plugins/serializers/influx"
)
const (
errMockFail = "this is an error"
)
type (
testMetric struct {
m telegraf.Metric
returnErr bool
}
bundledMsg struct {
*pubsub.Message
*stubResult
}
stubResult struct {
metricIDs []string
sendError bool
err chan error
done chan struct{}
}
stubTopic struct {
Settings pubsub.PublishSettings
ReturnErr map[string]bool
telegraf.Parser
*testing.T
Base64Data bool
ContentEncoding string
stopped bool
pLock sync.Mutex
published map[string]*pubsub.Message
bundler *bundler.Bundler
bLock sync.Mutex
bundleCount int
}
)
func getTestResources(tT *testing.T, settings pubsub.PublishSettings, testM []testMetric) (*PubSub, *stubTopic, []telegraf.Metric) {
// Instantiate an Influx line-protocol serializer
s := &serializers_influx.Serializer{}
require.NoError(tT, s.Init())
metrics := make([]telegraf.Metric, 0, len(testM))
t := &stubTopic{
T: tT,
ReturnErr: make(map[string]bool),
published: make(map[string]*pubsub.Message),
ContentEncoding: "identity",
}
for _, tm := range testM {
metrics = append(metrics, tm.m)
if tm.returnErr {
v, _ := tm.m.GetField("value")
t.ReturnErr[v.(string)] = true
}
}
ps := &PubSub{
Project: "test-project",
Topic: "test-topic",
stubTopic: func(string) topic { return t },
PublishCountThreshold: settings.CountThreshold,
PublishByteThreshold: settings.ByteThreshold,
PublishNumGoroutines: settings.NumGoroutines,
PublishTimeout: config.Duration(settings.Timeout),
ContentEncoding: "identity",
}
require.NoError(tT, ps.Init())
var err error
ps.encoder, err = internal.NewContentEncoder(ps.ContentEncoding)
require.NoError(tT, err)
ps.SetSerializer(s)
return ps, t, metrics
}
func (*stubTopic) ID() string {
return "test-topic"
}
func (t *stubTopic) Stop() {
t.pLock.Lock()
defer t.pLock.Unlock()
t.stopped = true
t.bundler.Flush()
}
func (t *stubTopic) Publish(ctx context.Context, msg *pubsub.Message) publishResult {
t.pLock.Lock()
defer t.pLock.Unlock()
if t.stopped || ctx.Err() != nil {
t.Fatalf("publish called after stop")
}
ids := t.parseIDs(msg)
r := &stubResult{
metricIDs: ids,
err: make(chan error, 1),
done: make(chan struct{}, 1),
}
for _, id := range ids {
_, ok := t.ReturnErr[id]
r.sendError = r.sendError || ok
}
bundled := &bundledMsg{msg, r}
if err := t.bundler.Add(bundled, len(msg.Data)); err != nil {
t.Fatalf("unexpected error while adding to bundle: %v", err)
}
return r
}
func (t *stubTopic) PublishSettings() pubsub.PublishSettings {
return t.Settings
}
func (t *stubTopic) SetPublishSettings(settings pubsub.PublishSettings) {
t.Settings = settings
t.initBundler()
}
func (t *stubTopic) initBundler() *stubTopic {
t.bundler = bundler.NewBundler(&bundledMsg{}, t.sendBundle())
t.bundler.DelayThreshold = 10 * time.Second
t.bundler.BundleCountThreshold = t.Settings.CountThreshold
if t.bundler.BundleCountThreshold > pubsub.MaxPublishRequestCount {
t.bundler.BundleCountThreshold = pubsub.MaxPublishRequestCount
}
t.bundler.BundleByteThreshold = t.Settings.ByteThreshold
t.bundler.BundleByteLimit = pubsub.MaxPublishRequestBytes
t.bundler.HandlerLimit = 25 * runtime.GOMAXPROCS(0)
return t
}
func (t *stubTopic) sendBundle() func(items interface{}) {
return func(items interface{}) {
t.bLock.Lock()
defer t.bLock.Unlock()
bundled := items.([]*bundledMsg)
for _, msg := range bundled {
r := msg.stubResult
for _, id := range r.metricIDs {
t.published[id] = msg.Message
}
if r.sendError {
r.err <- errors.New(errMockFail)
} else {
r.done <- struct{}{}
}
}
t.bundleCount++
}
}
func (t *stubTopic) parseIDs(msg *pubsub.Message) []string {
p := influx.Parser{}
err := p.Init()
require.NoError(t, err)
decoder, err := internal.NewContentDecoder(t.ContentEncoding)
require.NoError(t, err)
d, err := decoder.Decode(msg.Data)
if err != nil {
t.Errorf("unable to decode message: %v", err)
}
if t.Base64Data {
strData, err := base64.StdEncoding.DecodeString(string(d))
if err != nil {
t.Errorf("unable to base64 decode message: %v", err)
}
d = strData
}
metrics, err := p.Parse(d)
if err != nil {
t.Fatalf("unexpected parsing error: %v", err)
}
ids := make([]string, 0, len(metrics))
for _, met := range metrics {
id, _ := met.GetField("value")
ids = append(ids, id.(string))
}
return ids
}
func (r *stubResult) Get(ctx context.Context) (string, error) {
select {
case <-ctx.Done():
return "", ctx.Err()
case err := <-r.err:
return "", err
case <-r.done:
return "id-" + r.metricIDs[0], nil
}
}
func (t *stubTopic) getBundleCount() int {
t.bLock.Lock()
defer t.bLock.Unlock()
return t.bundleCount
}