1
0
Fork 0

Adding upstream version 1.34.4.

Signed-off-by: Daniel Baumann <daniel@debian.org>
This commit is contained in:
Daniel Baumann 2025-05-24 07:26:29 +02:00
parent e393c3af3f
commit 4978089aab
Signed by: daniel
GPG key ID: FBB4F0E80A80222F
4963 changed files with 677545 additions and 0 deletions

View file

@ -0,0 +1,81 @@
# Google Cloud Storage Input Plugin
This plugin will collect metrics from the given [Google Cloud Storage][gcs]
buckets in any of the supported [data formats][data_formats].
⭐ Telegraf v1.25.0
🏷️ cloud, datastore
💻 all
[gcs]: https://cloud.google.com/storage
[data_formats]: /docs/DATA_FORMATS_INPUT.md
## Global configuration options <!-- @/docs/includes/plugin_config.md -->
In addition to the plugin-specific configuration settings, plugins support
additional global and plugin configuration settings. These settings are used to
modify metrics, tags, and fields or create aliases and configure ordering, etc.
See the [CONFIGURATION.md][CONFIGURATION.md] for more details.
[CONFIGURATION.md]: ../../../docs/CONFIGURATION.md#plugins
## Configuration
```toml @sample.conf
# Gather metrics by iterating the files located on a Cloud Storage Bucket.
[[inputs.google_cloud_storage]]
## Required. Name of Cloud Storage bucket to ingest metrics from.
bucket = "my-bucket"
## Optional. Prefix of Cloud Storage bucket keys to list metrics from.
# key_prefix = "my-bucket"
## Key that will store the offsets in order to pick up where the ingestion was left.
offset_key = "offset_key"
## Optional. Maximum number of objects to process per gather cycle (0 = no limit).
objects_per_iteration = 10
## Required. Data format to consume.
## Each data format has its own unique set of configuration options.
## Read more about them here:
## https://github.com/influxdata/telegraf/blob/master/docs/DATA_FORMATS_INPUT.md
data_format = "influx"
## Optional. Filepath for GCP credentials JSON file to authorize calls to
## Google Cloud Storage APIs. If not set explicitly, Telegraf will attempt to use
## Application Default Credentials, which is preferred.
# credentials_file = "path/to/my/creds.json"
```
## Metrics
Measurements will reside on Google Cloud Storage with the format specified, for
example like
```json
{
"metrics": [
{
"fields": {
"cosine": 10,
"sine": -1.0975806427415925e-12
},
"name": "cpu",
"tags": {
"datacenter": "us-east-1",
"host": "localhost"
},
"timestamp": 1604148850990
}
]
}
```
when the [data format][data_formats] is set to `json`.
## Example Output
```text
google_cloud_storage,datacenter=us-east-1,host=localhost cosine=10,sine=-1.0975806427415925e-12 1604148850990000000
```

View file

@ -0,0 +1,281 @@
//go:generate ../../../tools/readme_config_includer/generator
package gcs
import (
"bytes"
"context"
_ "embed"
"encoding/json"
"errors"
"fmt"
"io"
"os"
"cloud.google.com/go/storage"
"golang.org/x/oauth2/google"
"google.golang.org/api/iterator"
"google.golang.org/api/option"
"github.com/influxdata/telegraf"
"github.com/influxdata/telegraf/plugins/inputs"
)
const (
	// emulatorHostEnv, when set, points the client at a local GCS emulator
	// endpoint instead of the real Google Cloud Storage API.
	emulatorHostEnv = "STORAGE_EMULATOR_HOST"
	// defaultOffSetKey is the object name used to persist the ingestion
	// offset when the user does not configure offset_key.
	defaultOffSetKey = "offset-key.json"
)
// sampleConfig holds the example configuration embedded at compile time,
// returned verbatim by SampleConfig.
//go:embed sample.conf
var sampleConfig string
// GCS is the Google Cloud Storage input plugin. It iterates the objects of a
// bucket, feeds each object's content to the configured parser and persists
// an offset object in the same bucket so ingestion resumes where it stopped.
type GCS struct {
	CredentialsFile string `toml:"credentials_file"`
	Bucket          string `toml:"bucket"`
	Prefix          string `toml:"key_prefix"`
	OffsetKey       string `toml:"offset_key"`
	// ObjectsPerIteration caps how many objects are handled per Gather call;
	// zero means no limit.
	ObjectsPerIteration int             `toml:"objects_per_iteration"`
	Log                 telegraf.Logger `toml:"-"`

	// offSet is the last persisted ingestion position loaded in Init.
	offSet offSet
	parser telegraf.Parser
	client *storage.Client
	ctx    context.Context
}
// offSet is the JSON document persisted in the bucket to remember the name
// of the last processed object.
type offSet struct {
	OffSet string `json:"offSet"`
}
// Init creates the storage client and loads the persisted offset so that
// Gather can resume where the previous ingestion stopped.
func (gcs *GCS) Init() error {
	gcs.ctx = context.Background()
	// Wrap the error instead of both logging and returning it; Telegraf
	// logs returned Init errors itself.
	if err := gcs.setUpClient(); err != nil {
		return fmt.Errorf("creating storage client failed: %w", err)
	}
	return gcs.setOffset()
}
// SampleConfig returns the embedded example configuration of the plugin.
func (*GCS) SampleConfig() string {
	return sampleConfig
}
// SetParser stores the parser used to turn object contents into metrics.
func (gcs *GCS) SetParser(parser telegraf.Parser) {
	gcs.parser = parser
}
// Gather lists the objects of the configured bucket (starting from the
// persisted offset, if any), parses each object's content into metrics and
// finally persists the name of the last processed object as the new offset.
func (gcs *GCS) Gather(acc telegraf.Accumulator) error {
	query := gcs.createQuery()
	bucketName := gcs.Bucket
	bucket := gcs.client.Bucket(bucketName)
	it := bucket.Objects(gcs.ctx, &query)
	processed := 0
	var name string
	for {
		attrs, err := it.Next()
		if errors.Is(err, iterator.Done) {
			gcs.Log.Infof("Iterated all the keys")
			break
		}
		if err != nil {
			gcs.Log.Errorf("Error during iteration of keys: %v", err)
			return err
		}
		name = attrs.Name
		// Skip the offset object itself and the object recorded as already
		// consumed; a failure on a single object is collected through the
		// accumulator so the remaining objects are still processed.
		if !gcs.shouldIgnore(name) {
			if err := gcs.processMeasurementsInObject(name, bucket, acc); err != nil {
				gcs.Log.Errorf("Could not process object %q in bucket %q: %v", name, bucketName, err)
				acc.AddError(fmt.Errorf("could not process object %q in bucket %q: %w", name, bucketName, err))
			}
		}
		processed++
		// Stop early once the optional per-iteration limit is reached,
		// persisting the offset so the next Gather resumes from here.
		if gcs.reachedThreshlod(processed) {
			return gcs.updateOffset(bucket, name)
		}
	}
	return gcs.updateOffset(bucket, name)
}
// createQuery builds the listing query for the configured prefix; a stored
// offset, when present, becomes the start offset so already ingested objects
// are skipped.
func (gcs *GCS) createQuery() storage.Query {
	query := storage.Query{Prefix: gcs.Prefix}
	if gcs.offSet.isPresent() {
		query.StartOffset = gcs.offSet.OffSet
	}
	return query
}
// shouldIgnore reports whether the named object must be skipped during
// ingestion: the offset bookkeeping object itself, or the object recorded as
// the last one already consumed.
func (gcs *GCS) shouldIgnore(name string) bool {
	switch name {
	case gcs.offSet.OffSet, gcs.OffsetKey:
		return true
	}
	return false
}
// processMeasurementsInObject downloads one object, parses its content into
// metrics and adds them to the accumulator.
func (gcs *GCS) processMeasurementsInObject(name string, bucket *storage.BucketHandle, acc telegraf.Accumulator) error {
	gcs.Log.Debugf("Fetching key: %s", name)
	r, err := bucket.Object(name).NewReader(gcs.ctx)
	// Check the error BEFORE deferring the close: the original deferred
	// closeReader first, which dereferenced a nil reader when NewReader failed.
	if err != nil {
		return err
	}
	defer gcs.closeReader(r)
	metrics, err := gcs.fetchedMetrics(r)
	if err != nil {
		return err
	}
	for _, metric := range metrics {
		acc.AddFields(metric.Name(), metric.Fields(), metric.Tags(), metric.Time())
	}
	return nil
}
// fetchedMetrics reads the whole object body and hands it to the configured
// parser, returning the resulting metrics.
func (gcs *GCS) fetchedMetrics(r *storage.Reader) ([]telegraf.Metric, error) {
	data, err := io.ReadAll(r)
	if err != nil {
		return nil, err
	}
	return gcs.parser.Parse(data)
}
// reachedThreshlod reports whether the number of processed objects hit the
// configured per-gather limit; a limit of zero means unlimited.
// NOTE(review): the name is misspelled ("Threshlod"); renaming would also
// touch the caller in Gather, so it is documented rather than changed here.
func (gcs *GCS) reachedThreshlod(processed int) bool {
	return gcs.ObjectsPerIteration != 0 && processed >= gcs.ObjectsPerIteration
}
// updateOffset persists the given object name as the new ingestion offset in
// the bucket and mirrors it into the in-memory state on success.
func (gcs *GCS) updateOffset(bucket *storage.BucketHandle, name string) error {
	if gcs.shouldIgnore(name) {
		return nil
	}
	offsetModel := newOffset(name)
	marshalled, err := json.Marshal(offsetModel)
	if err != nil {
		return err
	}
	writer := bucket.Object(gcs.OffsetKey).NewWriter(gcs.ctx)
	writer.ContentType = "application/json"
	if _, err := writer.Write(marshalled); err != nil {
		// Best-effort cleanup; the Write error is the one worth returning.
		writer.Close()
		return err
	}
	// GCS commits the upload on Close, so its error must be checked; the
	// original deferred Close and silently dropped a failed offset write.
	if err := writer.Close(); err != nil {
		return err
	}
	gcs.offSet = *offsetModel
	return nil
}
// setUpClient chooses between an unauthenticated local-emulator client (when
// STORAGE_EMULATOR_HOST is set) and a regular authenticated client.
func (gcs *GCS) setUpClient() error {
	endpoint, present := os.LookupEnv(emulatorHostEnv)
	if !present {
		return gcs.setUpDefaultClient()
	}
	return gcs.setUpLocalClient(endpoint)
}
// setUpLocalClient connects to a GCS emulator at the given host without
// authentication (plain HTTP).
func (gcs *GCS) setUpLocalClient(endpoint string) error {
	client, err := storage.NewClient(
		gcs.ctx,
		option.WithoutAuthentication(),
		option.WithEndpoint("http://"+endpoint),
	)
	if err != nil {
		return err
	}
	gcs.client = client
	return nil
}
// setUpDefaultClient creates an authenticated storage client, preferring the
// configured credentials file and falling back to Application Default
// Credentials.
func (gcs *GCS) setUpDefaultClient() error {
	var credentialsOption option.ClientOption
	if gcs.CredentialsFile != "" {
		credentialsOption = option.WithCredentialsFile(gcs.CredentialsFile)
	} else {
		creds, err := google.FindDefaultCredentials(gcs.ctx, storage.ScopeReadOnly)
		if err != nil {
			// Use %w so callers can unwrap; the original used %v and was
			// missing the space between the two sentences.
			return fmt.Errorf(
				"unable to find GCP Application Default Credentials: %w. "+
					"Either set ADC or provide CredentialsFile config", err)
		}
		credentialsOption = option.WithCredentials(creds)
	}
	client, err := storage.NewClient(gcs.ctx, credentialsOption)
	if err != nil {
		// Do not store a client alongside an error.
		return err
	}
	gcs.client = client
	return nil
}
// setOffset resolves the full offset-object key (prefix + configured or
// default name) and loads the previously persisted offset from the bucket.
// A missing offset object is not an error: ingestion starts from the
// beginning of the listing.
func (gcs *GCS) setOffset() error {
	if gcs.client == nil {
		return errors.New("cannot set offset if client is not set")
	}
	// The configured offset_key is always namespaced under the prefix.
	if gcs.OffsetKey != "" {
		gcs.OffsetKey = gcs.Prefix + gcs.OffsetKey
	} else {
		gcs.OffsetKey = gcs.Prefix + defaultOffSetKey
	}
	btk := gcs.client.Bucket(gcs.Bucket)
	obj := btk.Object(gcs.OffsetKey)
	var offSet offSet
	if r, err := obj.NewReader(gcs.ctx); err == nil {
		defer gcs.closeReader(r)
		buf := new(bytes.Buffer)
		// NOTE(review): a failed io.Copy is silently ignored here, leaving the
		// offset empty — confirm that restarting from the beginning is the
		// intended behavior on transient read errors.
		if _, err := io.Copy(buf, r); err == nil {
			if marshalError := json.Unmarshal(buf.Bytes(), &offSet); marshalError != nil {
				return marshalError
			}
		}
	} else {
		// Reader creation failed (typically: object not found) — start fresh.
		offSet = *newEmptyOffset()
	}
	gcs.offSet = offSet
	return nil
}
// closeReader closes an object reader, logging (not returning) any error.
func (gcs *GCS) closeReader(r *storage.Reader) {
	// Guard against a nil reader: callers may defer this call before
	// checking the error from NewReader.
	if r == nil {
		return
	}
	if err := r.Close(); err != nil {
		gcs.Log.Errorf("Could not close reader: %v", err)
	}
}
// newEmptyOffset returns an offset carrying the zero (empty) position.
func newEmptyOffset() *offSet {
	return &offSet{}
}
// newOffset wraps the given object name in an offset document.
func newOffset(offset string) *offSet {
	o := offSet{OffSet: offset}
	return &o
}
// isPresent reports whether a non-empty offset has been recorded.
func (o *offSet) isPresent() bool {
	return o.OffSet != ""
}
// init registers the plugin with Telegraf's input registry.
func init() {
	inputs.Add("google_cloud_storage", func() telegraf.Input {
		return &GCS{}
	})
}

View file

@ -0,0 +1,445 @@
package gcs
import (
"encoding/json"
"fmt"
"io"
"mime"
"net/http"
"net/http/httptest"
"os"
"strings"
"testing"
"github.com/stretchr/testify/require"
"github.com/influxdata/telegraf"
parsers_json "github.com/influxdata/telegraf/plugins/parsers/json"
"github.com/influxdata/telegraf/testutil"
)
// offSetTemplate renders the JSON body of an offset object for a given value.
const offSetTemplate = "{\"offSet\":\"%s\"}"
// TestRunSetUpClient exercises client setup without an emulator; a failure
// (e.g. no credentials in the environment) is logged, not fatal.
func TestRunSetUpClient(t *testing.T) {
	plugin := &GCS{
		Bucket:    "test-bucket",
		Prefix:    "prefix",
		OffsetKey: "1230405",
		Log:       testutil.Logger{},
	}
	err := plugin.setUpClient()
	if err != nil {
		t.Log(err)
	}
}
// TestRunInit verifies that Init loads the offset persisted on the server.
func TestRunInit(t *testing.T) {
	srv := startGCSServer(t)
	defer srv.Close()
	emulatorSetEnv(t, srv)
	plugin := &GCS{
		Bucket:    "test-bucket",
		Prefix:    "prefix/",
		OffsetKey: "offset.json",
		Log:       testutil.Logger{},
	}
	require.NoError(t, plugin.Init())
	require.Equal(t, "offsetfile", plugin.offSet.OffSet)
}
// TestRunInitNoOffsetKey verifies that the default offset key is derived from
// the prefix when offset_key is not configured.
func TestRunInitNoOffsetKey(t *testing.T) {
	srv := startGCSServer(t)
	defer srv.Close()
	emulatorSetEnv(t, srv)
	plugin := &GCS{
		Bucket: "test-bucket",
		Prefix: "prefix/",
		Log:    testutil.Logger{},
	}
	require.NoError(t, plugin.Init())
	require.Equal(t, "offsetfile", plugin.offSet.OffSet)
	require.Equal(t, "prefix/offset-key.json", plugin.OffsetKey)
}
// TestRunGatherOneItem gathers a single served object and checks that the
// parsed metric carries the expected name, tags and field values.
func TestRunGatherOneItem(t *testing.T) {
	srv := startOneItemGCSServer(t)
	defer srv.Close()
	emulatorSetEnv(t, srv)
	plugin := &GCS{
		Bucket: "test-iteration-bucket",
		Prefix: "prefix/",
		Log:    testutil.Logger{},
		parser: createParser(),
	}
	acc := &testutil.Accumulator{}
	require.NoError(t, plugin.Init())
	require.NoError(t, plugin.Gather(acc))
	metric := acc.Metrics[0]
	require.Equal(t, "cpu", metric.Measurement)
	require.Equal(t, "us-east-1", metric.Tags["tags_datacenter"])
	require.Equal(t, "localhost", metric.Tags["tags_host"])
	require.InDelta(t, 10.0, metric.Fields["fields_cosine"], testutil.DefaultDelta)
	require.InEpsilon(t, -1.0975806427415925e-12, metric.Fields["fields_sine"], testutil.DefaultEpsilon)
}
// TestRunGatherOneIteration checks that a single unlimited Gather consumes
// all objects after the stored offset (three of the four served).
func TestRunGatherOneIteration(t *testing.T) {
	srv := startMultipleItemGCSServer(t)
	defer srv.Close()
	emulatorSetEnv(t, srv)
	plugin := &GCS{
		Bucket:    "test-iteration-bucket",
		Prefix:    "prefix/",
		OffsetKey: "custom-offset-key.json",
		Log:       testutil.Logger{},
		parser:    createParser(),
	}
	acc := &testutil.Accumulator{}
	require.NoError(t, plugin.Init())
	require.NoError(t, plugin.Gather(acc))
	require.Len(t, acc.Metrics, 3)
}
// TestRunGatherIterationsWithLimit checks that objects_per_iteration limits
// each Gather to one object, so three calls accumulate the three metrics.
// (Renamed: the original test name misspelled "Iterations".)
func TestRunGatherIterationsWithLimit(t *testing.T) {
	srv := startMultipleItemGCSServer(t)
	defer srv.Close()
	emulatorSetEnv(t, srv)
	gcs := &GCS{
		Bucket:              "test-iteration-bucket",
		Prefix:              "prefix/",
		ObjectsPerIteration: 1,
		OffsetKey:           "custom-offset-key.json",
		Log:                 testutil.Logger{},
		parser:              createParser(),
	}
	acc := &testutil.Accumulator{}
	require.NoError(t, gcs.Init())
	require.NoError(t, gcs.Gather(acc))
	require.Len(t, acc.Metrics, 1)
	require.NoError(t, gcs.Gather(acc))
	require.Len(t, acc.Metrics, 2)
	require.NoError(t, gcs.Gather(acc))
	require.Len(t, acc.Metrics, 3)
}
// TestRunGatherIterationWithPages checks paged listings: one Gather walks all
// four pages, persists the last object as the offset, and a second Gather
// starting from that offset yields nothing.
func TestRunGatherIterationWithPages(t *testing.T) {
	srv := stateFullGCSServer(t)
	defer srv.Close()
	emulatorSetEnv(t, srv)
	plugin := &GCS{
		Bucket:    "test-iteration-bucket",
		Prefix:    "prefix/",
		OffsetKey: "custom-offset-key.json",
		Log:       testutil.Logger{},
		parser:    createParser(),
	}
	acc := &testutil.Accumulator{}
	require.NoError(t, plugin.Init())
	require.NoError(t, plugin.Gather(acc))
	require.Len(t, acc.Metrics, 4)
	require.True(t, plugin.offSet.isPresent())
	require.Equal(t, "prefix/1604148850994", plugin.offSet.OffSet)
	emptyAcc := &testutil.Accumulator{}
	require.NoError(t, plugin.Gather(emptyAcc))
	require.Empty(t, emptyAcc.Metrics)
}
// createParser builds the JSON parser used by the gather tests; the fixture
// files flatten under "metrics" with underscore-joined tag/field keys.
func createParser() telegraf.Parser {
	parser := &parsers_json.Parser{
		MetricName: "cpu",
		Query:      "metrics",
		TagKeys:    []string{"tags_datacenter", "tags_host"},
		TimeKey:    "timestamp",
		TimeFormat: "unix_ms",
		Strict:     true,
	}
	if err := parser.Init(); err != nil {
		panic(err)
	}
	return parser
}
// startGCSServer serves the offset object under both the configured and the
// default key name; both paths return identical JSON bodies.
func startGCSServer(t *testing.T) *httptest.Server {
	srv := httptest.NewServer(http.NotFoundHandler())
	offsetBody := fmt.Sprintf(offSetTemplate, "offsetfile")
	srv.Config.Handler = http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		switch r.URL.Path {
		case "/test-bucket/prefix/offset.json", "/test-bucket/prefix/offset-key.json":
			w.WriteHeader(http.StatusOK)
			if _, err := w.Write([]byte(offsetBody)); err != nil {
				w.WriteHeader(http.StatusInternalServerError)
				t.Error(err)
				return
			}
		default:
			failPath(r.URL.Path, t, w)
		}
	})
	return srv
}
// startOneItemGCSServer serves a listing containing exactly one object;
// everything else is delegated to the shared blob handler.
func startOneItemGCSServer(t *testing.T) *httptest.Server {
	listing := readJSON(t, "testdata/single_file_list.json")
	srv := httptest.NewServer(http.NotFoundHandler())
	srv.Config.Handler = http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		if r.URL.Path == "/b/test-iteration-bucket/o" {
			serveJSONText(w, listing)
			return
		}
		serveBlobs(t, w, r.URL.Path, "")
	})
	return srv
}
// startMultipleItemGCSServer serves a four-object listing whose content
// depends on the requested startOffset; a fixed stored offset is served for
// the offset object.
func startMultipleItemGCSServer(t *testing.T) *httptest.Server {
	objListing := parseJSONFromFile(t, "testdata/file_listing.json")
	firstElement := parseJSONFromFile(t, "testdata/first_file_listing.json")
	secondElement := parseJSONFromFile(t, "testdata/second_file_listing.json")
	thirdElement := parseJSONFromFile(t, "testdata/third_file_listing.json")
	fourthElement := parseJSONFromFile(t, "testdata/fourth_file_listing.json")
	srv := httptest.NewServer(http.NotFoundHandler())
	currentOffSetKey := fmt.Sprintf(offSetTemplate, "prefix/1604148850991")
	srv.Config.Handler = http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		switch r.URL.Path {
		case "/b/test-iteration-bucket/o":
			// Listing content depends on where the client wants to resume.
			switch r.URL.Query().Get("startOffset") {
			case "prefix/1604148850991":
				objListing["items"] = []interface{}{secondElement, thirdElement, fourthElement}
			case "prefix/16041488509912", "prefix/16041488509913":
				objListing["items"] = []interface{}{thirdElement, fourthElement}
			default:
				objListing["items"] = []interface{}{firstElement, secondElement, thirdElement, fourthElement}
			}
			data, err := json.Marshal(objListing)
			if err != nil {
				// Report marshalling failures as server errors; the original
				// mislabelled them "unexpected path" and used t.Fatalf, which
				// must not be called from the handler goroutine.
				w.WriteHeader(http.StatusInternalServerError)
				t.Error(err)
				return
			}
			w.WriteHeader(http.StatusOK)
			if _, err := w.Write(data); err != nil {
				t.Error(err)
			}
		default:
			serveBlobs(t, w, r.URL.Path, currentOffSetKey)
		}
	})
	return srv
}
// stateFullGCSServer emulates a GCS backend that serves the object listing in
// four pages (driven by pageToken) and accepts multipart offset uploads,
// keeping the uploaded offset in currentOffSetKey so later blob requests
// observe the updated state.
func stateFullGCSServer(t *testing.T) *httptest.Server {
	srv := httptest.NewServer(http.NotFoundHandler())
	firstElement := parseJSONFromFile(t, "testdata/first_file_listing.json")
	secondElement := parseJSONFromFile(t, "testdata/second_file_listing.json")
	thirdElement := parseJSONFromFile(t, "testdata/third_file_listing.json")
	fourthElement := parseJSONFromFile(t, "testdata/fourth_file_listing.json")
	// Mutable server-side state: the most recently uploaded offset document.
	currentOffSetKey := fmt.Sprintf(offSetTemplate, "prefix/1604148850990")
	srv.Config.Handler = http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		switch r.URL.Path {
		case "/b/test-iteration-bucket/o":
			offset := r.URL.Query().Get("startOffset")
			objListing := parseJSONFromFile(t, "testdata/file_listing.json")
			pageToken := r.URL.Query().Get("pageToken")
			// One element per page; resuming from the final offset yields an
			// empty listing, otherwise the first page starts the chain.
			if pageToken == "page2" {
				objListing["items"] = []interface{}{secondElement}
				objListing["nextPageToken"] = "page3"
			} else if pageToken == "page3" {
				objListing["items"] = []interface{}{thirdElement}
				objListing["nextPageToken"] = "page4"
			} else if pageToken == "page4" {
				objListing["items"] = []interface{}{fourthElement}
			} else if offset == "prefix/1604148850994" {
				objListing["items"] = make([]interface{}, 0)
			} else {
				objListing["items"] = []interface{}{firstElement}
				objListing["nextPageToken"] = "page2"
			}
			if data, err := json.Marshal(objListing); err == nil {
				w.WriteHeader(http.StatusOK)
				if _, err := w.Write(data); err != nil {
					w.WriteHeader(http.StatusInternalServerError)
					t.Error(err)
					return
				}
			} else {
				failPath(r.URL.Path, t, w)
			}
		case "/upload/storage/v1/b/test-iteration-bucket/o":
			// Multipart upload of the offset object: extract the JSON part
			// and remember it as the new current offset.
			_, params, err := mime.ParseMediaType(r.Header["Content-Type"][0])
			if err != nil {
				w.WriteHeader(http.StatusInternalServerError)
				t.Error(err)
				return
			}
			boundary := params["boundary"]
			currentOffSetKey, err = fetchJSON(t, boundary, r.Body)
			if err != nil {
				w.WriteHeader(http.StatusInternalServerError)
				t.Error(err)
				return
			}
		default:
			serveBlobs(t, w, r.URL.Path, currentOffSetKey)
		}
	})
	return srv
}
// serveBlobs serves the fixture objects and offset documents that the fake
// GCS servers delegate to; unknown paths fail the test.
func serveBlobs(t *testing.T, w http.ResponseWriter, urlPath, offsetKey string) {
	singleObjectNotFound := readJSON(t, "testdata/single_object_not_found.json")
	firstFile := readJSON(t, "testdata/first_file.json")
	secondFile := readJSON(t, "testdata/second_file.json")
	thirdFile := readJSON(t, "testdata/third_file.json")
	fourthFile := readJSON(t, "testdata/fourth_file.json")
	switch urlPath {
	case "/test-iteration-bucket/prefix/offset-key.json":
		// No offset stored yet for the iteration bucket.
		w.WriteHeader(http.StatusNotFound)
		_, err := w.Write(singleObjectNotFound)
		require.NoError(t, err)
	case "/test-bucket/prefix/offset.json", "/test-iteration-bucket/prefix/custom-offset-key.json":
		// Both offset endpoints echo the caller-supplied offset document.
		w.WriteHeader(http.StatusOK)
		_, err := w.Write([]byte(offsetKey))
		require.NoError(t, err)
	case "/test-bucket/prefix/offset-key.json":
		w.WriteHeader(http.StatusOK)
		_, err := w.Write([]byte("{\"offSet\":\"offsetfile\"}"))
		require.NoError(t, err)
	case "/test-iteration-bucket/1604148850990", "/test-iteration-bucket/prefix/1604148850991":
		serveJSONText(w, firstFile)
	case "/test-iteration-bucket/prefix/1604148850992":
		serveJSONText(w, secondFile)
	case "/test-iteration-bucket/prefix/1604148850993":
		serveJSONText(w, thirdFile)
	case "/test-iteration-bucket/prefix/1604148850994":
		serveJSONText(w, fourthFile)
	case "/upload/storage/v1/b/test-iteration-bucket/o":
		// Acknowledge offset uploads without persisting anything.
		w.WriteHeader(http.StatusOK)
	default:
		failPath(urlPath, t, w)
	}
}
func fetchJSON(t *testing.T, boundary string, rc io.ReadCloser) (string, error) {
defer rc.Close()
bodyBytes, err := io.ReadAll(rc)
if err != nil {
t.Fatalf("Could not read bytes from offset action")
return "", err
}
splits := strings.Split(string(bodyBytes), boundary)
offsetPart := splits[2]
offsets := strings.Split(offsetPart, "\n")
fmt.Print(offsets[3])
return offsets[3], nil
}
func serveJSONText(w http.ResponseWriter, jsonText []byte) {
w.WriteHeader(http.StatusOK)
if _, err := w.Write(jsonText); err != nil {
fmt.Println(err)
}
}
// failPath responds 404 and fails the test for a request path no handler
// expects.
func failPath(path string, t *testing.T, w http.ResponseWriter) {
	w.WriteHeader(http.StatusNotFound)
	// Errorf instead of Fatalf: this is called from the HTTP handler
	// goroutine, where FailNow/Fatalf only terminates that goroutine and is
	// documented as unsupported outside the test goroutine.
	t.Errorf("unexpected path: %s", path)
}
// parseJSONFromFile reads and unmarshals a testdata JSON file into a generic
// map, failing the test on any error.
func parseJSONFromFile(t *testing.T, jsonFilePath string) map[string]interface{} {
	data := readJSON(t, jsonFilePath)
	var element map[string]interface{}
	// require.NoErrorf already no-ops on nil errors, so the redundant
	// `if err != nil` wrapper around it was dropped.
	err := json.Unmarshal(data, &element)
	require.NoErrorf(t, err, "could not parse from data file %s", jsonFilePath)
	return element
}
// readJSON returns the raw bytes of a testdata file, failing the test if the
// file cannot be read.
func readJSON(t *testing.T, jsonFilePath string) []byte {
	content, err := os.ReadFile(jsonFilePath)
	require.NoErrorf(t, err, "could not read from data file %s", jsonFilePath)
	return content
}
func emulatorSetEnv(t *testing.T, srv *httptest.Server) {
t.Setenv("STORAGE_EMULATOR_HOST", strings.ReplaceAll(srv.URL, "http://", ""))
}

View file

@ -0,0 +1,24 @@
# Gather metrics by iterating the files located on a Cloud Storage Bucket.
[[inputs.google_cloud_storage]]
## Required. Name of Cloud Storage bucket to ingest metrics from.
bucket = "my-bucket"
## Optional. Prefix of Cloud Storage bucket keys to list metrics from.
# key_prefix = "my-bucket"
## Key that will store the offsets in order to pick up where the ingestion was left.
offset_key = "offset_key"
  ## Optional. Maximum number of objects to process per gather cycle (0 = no limit).
objects_per_iteration = 10
## Required. Data format to consume.
## Each data format has its own unique set of configuration options.
## Read more about them here:
## https://github.com/influxdata/telegraf/blob/master/docs/DATA_FORMATS_INPUT.md
data_format = "influx"
## Optional. Filepath for GCP credentials JSON file to authorize calls to
## Google Cloud Storage APIs. If not set explicitly, Telegraf will attempt to use
## Application Default Credentials, which is preferred.
# credentials_file = "path/to/my/creds.json"

View file

@ -0,0 +1,3 @@
{
"kind": "storage#objects"
}

View file

@ -0,0 +1,16 @@
{
"metrics": [
{
"fields": {
"cosine": 10,
"sine": -1.0975806427415925e-12
},
"name": "cpu",
"tags": {
"datacenter": "us-east-1",
"host": "localhost"
},
"timestamp": 1604148850991
}
]
}

View file

@ -0,0 +1,19 @@
{
"kind": "storage#object",
"id": "test-iteration-bucket/prefix/1604148850991/1604148851353983",
"selfLink": "https://www.googleapis.com/storage/v1/b/test-iteration-bucket/o/1604148850991",
"mediaLink": "https://content-storage.googleapis.com/download/storage/v1/b/test-iteration-bucket/o/1604148850991?generation=1604148851353983&alt=media",
"name": "prefix/1604148850991",
"bucket": "test-iteration-bucket",
"generation": "1604148851353983",
"metageneration": "1",
"contentType": "text/plain; charset=utf-8",
"storageClass": "STANDARD",
"size": "161",
"md5Hash": "y59iuRCTpkm7wpvU5YHUYw==",
"crc32c": "y57reA==",
"etag": "CP/SzpPw3uwCEAE=",
"timeCreated": "2020-10-31T12:54:11.353Z",
"updated": "2020-10-31T12:54:11.353Z",
"timeStorageClassUpdated": "2020-10-31T12:54:11.353Z"
}

View file

@ -0,0 +1,16 @@
{
"metrics": [
{
"fields": {
"cosine": 13,
"sine": -4.0975806427415925e-12
},
"name": "cpu",
"tags": {
"datacenter": "us-east-1",
"host": "localhost"
},
"timestamp": 1604148850994
}
]
}

View file

@ -0,0 +1,19 @@
{
"kind": "storage#object",
"id": "test-iteration-bucket/prefix/1604148850994/1604148851467554",
"selfLink": "https://www.googleapis.com/storage/v1/b/test-iteration-bucket/o/1604148850994",
"mediaLink": "https://content-storage.googleapis.com/download/storage/v1/b/test-iteration-bucket/o/1604148850994?generation=1604148851467554&alt=media",
"name": "prefix/1604148850994",
"bucket": "test-iteration-bucket",
"generation": "1604148851467554",
"metageneration": "1",
"contentType": "text/plain; charset=utf-8",
"storageClass": "STANDARD",
"size": "161",
"md5Hash": "y59iuRCTpkm7wpvU5YHUYw==",
"crc32c": "y57reA==",
"etag": "CKLK1ZPw3uwCEAE=",
"timeCreated": "2020-10-31T12:54:11.467Z",
"updated": "2020-10-31T12:54:11.467Z",
"timeStorageClassUpdated": "2020-10-31T12:54:11.467Z"
}

View file

@ -0,0 +1,16 @@
{
"metrics": [
{
"fields": {
"cosine": 11,
"sine": -2.0975806427415925e-12
},
"name": "cpu",
"tags": {
"datacenter": "us-east-1",
"host": "localhost"
},
"timestamp": 1604148850992
}
]
}

View file

@ -0,0 +1,19 @@
{
"kind": "storage#object",
"id": "test-iteration-bucket/prefix/1604148850992/1604148851414237",
"selfLink": "https://www.googleapis.com/storage/v1/b/test-iteration-bucket/o/1604148850992",
"mediaLink": "https://content-storage.googleapis.com/download/storage/v1/b/test-iteration-bucket/o/1604148850992?generation=1604148851414237&alt=media",
"name": "prefix/1604148850992",
"bucket": "test-iteration-bucket",
"generation": "1604148851414237",
"metageneration": "1",
"contentType": "text/plain; charset=utf-8",
"storageClass": "STANDARD",
"size": "161",
"md5Hash": "y59iuRCTpkm7wpvU5YHUYw==",
"crc32c": "y57reA==",
"etag": "CN2p0pPw3uwCEAE=",
"timeCreated": "2020-10-31T12:54:11.414Z",
"updated": "2020-10-31T12:54:11.414Z",
"timeStorageClassUpdated": "2020-10-31T12:54:11.414Z"
}

View file

@ -0,0 +1,24 @@
{
"kind": "storage#objects",
"items": [
{
"kind": "storage#object",
"id": "test-iteration-bucket/1604148850990/1604148851295698",
"selfLink": "https://www.googleapis.com/storage/v1/b/1604148850990/o/1604148850990",
"mediaLink": "https://content-storage.googleapis.com/download/storage/v1/b/test-iteration-bucket/o/1604148850990?generation=1604148851295698&alt=media",
"name": "1604148850990",
"bucket": "test-iteration-bucket",
"generation": "1604148851295698",
"metageneration": "1",
"contentType": "text/plain; charset=utf-8",
"storageClass": "STANDARD",
"size": "161",
"md5Hash": "y59iuRCTpkm7wpvU5YHUYw==",
"crc32c": "y57reA==",
"etag": "CNKLy5Pw3uwCEAE=",
"timeCreated": "2020-10-31T12:54:11.295Z",
"updated": "2020-10-31T12:54:11.295Z",
"timeStorageClassUpdated": "2020-10-31T12:54:11.295Z"
}
]
}

View file

@ -0,0 +1,13 @@
{
"error": {
"code": 404,
"message": "No such object: test-iteration-bucket/prefix/offset-key.json",
"errors": [
{
"message": "No such object: test-iteration-bucket/prefix/offset-key.json",
"domain": "global",
"reason": "notFound"
}
]
}
}

View file

@ -0,0 +1,16 @@
{
"metrics": [
{
"fields": {
"cosine": 12,
"sine": -3.0975806427415925e-12
},
"name": "cpu",
"tags": {
"datacenter": "us-east-1",
"host": "localhost"
},
"timestamp": 1604148850993
}
]
}

View file

@ -0,0 +1,19 @@
{
"kind": "storage#object",
"id": "test-iteration-bucket/prefix/1604148850993/1604148851467554",
"selfLink": "https://www.googleapis.com/storage/v1/b/test-iteration-bucket/o/1604148850993",
"mediaLink": "https://content-storage.googleapis.com/download/storage/v1/b/test-iteration-bucket/o/1604148850993?generation=1604148851467554&alt=media",
"name": "prefix/1604148850993",
"bucket": "test-iteration-bucket",
"generation": "1604148851467554",
"metageneration": "1",
"contentType": "text/plain; charset=utf-8",
"storageClass": "STANDARD",
"size": "161",
"md5Hash": "y59iuRCTpkm7wpvU5YHUYw==",
"crc32c": "y57reA==",
"etag": "CKLK1ZPw3uwCEAE=",
"timeCreated": "2020-10-31T12:54:11.467Z",
"updated": "2020-10-31T12:54:11.467Z",
"timeStorageClassUpdated": "2020-10-31T12:54:11.467Z"
}