Adding upstream version 1.34.4.
Signed-off-by: Daniel Baumann <daniel@debian.org>
This commit is contained in:
parent
e393c3af3f
commit
4978089aab
4963 changed files with 677545 additions and 0 deletions
29
tools/readme_linter/README.md
Normal file
29
tools/readme_linter/README.md
Normal file
|
@ -0,0 +1,29 @@
|
|||
# README.md linter
|
||||
|
||||
## Building
|
||||
|
||||
```shell
|
||||
telegraf/tools/readme_linter$ go build .
|
||||
```
|
||||
|
||||
## Running
|
||||
|
||||
Run readme_linter with the filenames of the readme files you want to lint.
|
||||
|
||||
```shell
|
||||
telegraf/tools/readme_linter$ ./readme_linter <path to readme>
|
||||
```
|
||||
|
||||
You can lint multiple filenames at once. This works well with shell globs.
|
||||
|
||||
To lint all the plugin readmes:
|
||||
|
||||
```shell
|
||||
telegraf/tools/readme_linter$ ./readme_linter ../../plugins/*/*/README.md
|
||||
```
|
||||
|
||||
To lint readmes for inputs starting a-d:
|
||||
|
||||
```shell
|
||||
telegraf/tools/readme_linter$ ./readme_linter ../../plugins/inputs/[a-d]*/README.md
|
||||
```
|
158
tools/readme_linter/assert.go
Normal file
158
tools/readme_linter/assert.go
Normal file
|
@ -0,0 +1,158 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"regexp"
|
||||
"runtime"
|
||||
"sort"
|
||||
|
||||
"github.com/yuin/goldmark/ast"
|
||||
)
|
||||
|
||||
// T is the type for all linter assert methods. One T is created per linted
// file; it accumulates the count of failed assertions for that file.
type T struct {
	filename       string // path of the README being linted, used in messages
	markdown       []byte // raw markdown content of the file
	newlineOffsets []int  // offsets of newlines in markdown, for offset-to-line translation
	sourceFlag     bool   // when true, also print the linter source location of failed asserts
	pluginType     plugin // plugin category this README documents (input, output, ...)

	fails int // number of failed assertions recorded so far
}
|
||||
|
||||
// printFailedAssertf is called by all assert functions that involve a node.
// It prints the file location of the node, the formatted failure message,
// the rule source location (with -source), and counts the failure.
// NOTE: the printRule(3) stack depth assumes this is called via exactly one
// assert* wrapper below the rule function; do not add intermediate layers.
func (t *T) printFailedAssertf(n ast.Node, format string, args ...interface{}) {
	t.printFile(n)
	fmt.Printf(format+"\n", args...)
	t.printRule(3)
	t.fails++
}
|
||||
|
||||
// assertf is the assert function that doesn't involve a node, for example if
// something is missing from the file entirely.
func (t *T) assertf(format string, args ...interface{}) {
	t.assertLine2f(0, format, args...) // There's no line number associated, so use the first
}
|
||||
|
||||
// assertNodef records a failed assertion located at node n, with a formatted
// message. Thin wrapper around printFailedAssertf.
func (t *T) assertNodef(n ast.Node, format string, args ...interface{}) {
	t.printFailedAssertf(n, format, args...)
}
|
||||
|
||||
// assertNodeLineOffsetf records a failed assertion located `offset` lines
// below the first line of node n, for pointing at one line inside a
// multi-line node such as a paragraph.
// NOTE: printRule(3) assumes this is called directly from a rule function.
func (t *T) assertNodeLineOffsetf(n ast.Node, offset int, format string, args ...interface{}) {
	t.printFileOffset(n, offset)
	fmt.Printf(format+"\n", args...)
	t.printRule(3)
	t.fails++
}
|
||||
|
||||
// assertLinef records a failed assertion at the given zero-based line of the
// linted file.
func (t *T) assertLinef(line int, format string, args ...interface{}) {
	// this func only exists to make the call stack to t.printRule the same depth
	// as when called through assertf

	t.assertLine2f(line, format, args...)
}
|
||||
|
||||
// assertLine2f prints the file location for the given zero-based line, the
// formatted message, the rule source location (with -source), and counts the
// failure. Callers must be exactly one wrapper below the rule function so
// that printRule(3) reports the right frame (see assertf / assertLinef).
func (t *T) assertLine2f(line int, format string, args ...interface{}) {
	t.printFileLine(line)
	fmt.Printf(format+"\n", args...)
	t.printRule(3)
	t.fails++
}
|
||||
|
||||
func (t *T) printRule(callers int) {
|
||||
if !t.sourceFlag {
|
||||
return
|
||||
}
|
||||
|
||||
pc, codeFilename, codeLine, ok := runtime.Caller(callers)
|
||||
if !ok {
|
||||
panic("can not get caller")
|
||||
}
|
||||
|
||||
f := runtime.FuncForPC(pc)
|
||||
var funcName string
|
||||
if f != nil {
|
||||
funcName = f.Name()
|
||||
}
|
||||
|
||||
fmt.Printf("%s:%d: ", codeFilename, codeLine)
|
||||
if len(funcName) == 0 {
|
||||
fmt.Printf("failed assert\n")
|
||||
} else {
|
||||
fmt.Printf("failed assert in function %s\n", funcName)
|
||||
}
|
||||
}
|
||||
|
||||
func (t *T) line(offset int) int {
|
||||
return sort.SearchInts(t.newlineOffsets, offset)
|
||||
}
|
||||
|
||||
// printFile prints the "filename:line: " prefix for node n (no line offset).
func (t *T) printFile(n ast.Node) {
	t.printFileOffset(n, 0)
}
|
||||
|
||||
func (t *T) printFileOffset(n ast.Node, offset int) {
|
||||
lines := n.Lines()
|
||||
if lines == nil || lines.Len() == 0 {
|
||||
t.printFileLine(0)
|
||||
return
|
||||
}
|
||||
line := t.line(lines.At(0).Start)
|
||||
t.printFileLine(line + offset)
|
||||
}
|
||||
|
||||
// printFileLine prints the "filename:line: " prefix for a failure message.
// The line argument is zero-based; the printed line number is one-based.
func (t *T) printFileLine(line int) {
	fmt.Printf("%s:%d: ", t.filename, line+1) // Lines start with 1
}
|
||||
|
||||
func (t *T) printPassFail() {
|
||||
if t.fails == 0 {
|
||||
fmt.Printf("Pass %s\n", t.filename)
|
||||
} else {
|
||||
fmt.Printf("Fail %s, %d failed assertions\n", t.filename, t.fails)
|
||||
}
|
||||
}
|
||||
|
||||
func (t *T) assertKind(expected ast.NodeKind, n ast.Node) {
|
||||
if n.Kind() == expected {
|
||||
return
|
||||
}
|
||||
|
||||
t.printFailedAssertf(n, "expected %s, have %s", expected.String(), n.Kind().String())
|
||||
}
|
||||
|
||||
func (t *T) assertFirstChildRegexp(expectedPattern string, n ast.Node) {
|
||||
var validRegexp = regexp.MustCompile(expectedPattern)
|
||||
|
||||
if !n.HasChildren() {
|
||||
t.printFailedAssertf(n, "expected children")
|
||||
return
|
||||
}
|
||||
c := n.FirstChild()
|
||||
|
||||
//nolint:staticcheck // need to use this since we aren't sure the type
|
||||
actual := string(c.Text(t.markdown))
|
||||
|
||||
if !validRegexp.MatchString(actual) {
|
||||
t.printFailedAssertf(n, "%q does not match regexp %q", actual, expectedPattern)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
func (t *T) assertHeadingLevel(expected int, n ast.Node) {
|
||||
h, ok := n.(*ast.Heading)
|
||||
if !ok {
|
||||
fmt.Printf("failed Heading type assertion\n")
|
||||
t.fails++
|
||||
return
|
||||
}
|
||||
|
||||
if h.Level == expected {
|
||||
return
|
||||
}
|
||||
|
||||
t.printFailedAssertf(n, "expected header level %d, have %d", expected, h.Level)
|
||||
}
|
||||
|
||||
// pass reports whether the file passed the lint, i.e. no assertion failed.
func (t *T) pass() bool {
	return t.fails == 0
}
|
149
tools/readme_linter/main.go
Normal file
149
tools/readme_linter/main.go
Normal file
|
@ -0,0 +1,149 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"flag"
|
||||
"os"
|
||||
|
||||
"github.com/yuin/goldmark"
|
||||
"github.com/yuin/goldmark/ast"
|
||||
"github.com/yuin/goldmark/extension"
|
||||
"github.com/yuin/goldmark/parser"
|
||||
"github.com/yuin/goldmark/text"
|
||||
"github.com/yuin/goldmark/util"
|
||||
)
|
||||
|
||||
func main() {
|
||||
sourceFlag := flag.Bool("source", false, "include location of linter code that failed assertion")
|
||||
quiet := flag.Bool("quiet", false, "only print failed assertion but no pass information")
|
||||
|
||||
flag.Parse()
|
||||
|
||||
var err error
|
||||
pass := true
|
||||
for _, filename := range flag.Args() {
|
||||
var filePass bool
|
||||
filePass, err = checkFile(filename, guessPluginType(filename), *sourceFlag, *quiet)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
pass = pass && filePass
|
||||
}
|
||||
if !pass {
|
||||
os.Exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
// ruleFunc is a single lint rule: it inspects the parsed markdown tree,
// records failures on T, and returns an error only for unexpected conditions.
type ruleFunc func(*T, ast.Node) error

// rulesMap maps a plugin type to the list of rules applied to its README.
type rulesMap map[plugin][]ruleFunc

// rules holds the rule set per plugin type; populated in init below.
var rules rulesMap
|
||||
|
||||
func init() {
|
||||
rules = make(rulesMap)
|
||||
|
||||
// Rules for all plugin types
|
||||
all := []ruleFunc{
|
||||
firstSection,
|
||||
metadata,
|
||||
configSection,
|
||||
relativeTelegrafLinks,
|
||||
noLongLinesInParagraphs(80),
|
||||
}
|
||||
for i := pluginInput; i <= pluginParser; i++ {
|
||||
rules[i] = all
|
||||
}
|
||||
|
||||
// Rules for input plugins
|
||||
rules[pluginInput] = append(rules[pluginInput], []ruleFunc{
|
||||
requiredSectionsClose([]string{
|
||||
"Example Output",
|
||||
"Metrics",
|
||||
"Global configuration options",
|
||||
}),
|
||||
}...)
|
||||
|
||||
// Rules for output plugins
|
||||
rules[pluginOutput] = append(rules[pluginOutput], []ruleFunc{
|
||||
requiredSectionsClose([]string{
|
||||
"Global configuration options",
|
||||
}),
|
||||
}...)
|
||||
|
||||
// Rules for processor pluings
|
||||
rules[pluginProcessor] = append(rules[pluginProcessor], []ruleFunc{
|
||||
requiredSectionsClose([]string{
|
||||
"Global configuration options",
|
||||
}),
|
||||
}...)
|
||||
|
||||
// Rules for aggregator pluings
|
||||
rules[pluginAggregator] = append(rules[pluginAggregator], []ruleFunc{
|
||||
requiredSectionsClose([]string{
|
||||
"Global configuration options",
|
||||
}),
|
||||
}...)
|
||||
}
|
||||
|
||||
func checkFile(filename string, pluginType plugin, sourceFlag, quiet bool) (bool, error) {
|
||||
md, err := os.ReadFile(filename)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
// Goldmark returns locations as offsets. We want line
|
||||
// numbers. Find the newlines in the file so we can translate
|
||||
// later.
|
||||
scanner := bufio.NewScanner(bytes.NewReader(md))
|
||||
scanner.Split(bufio.ScanRunes)
|
||||
offset := 0
|
||||
newlineOffsets := make([]int, 0)
|
||||
for scanner.Scan() {
|
||||
if scanner.Text() == "\n" {
|
||||
newlineOffsets = append(newlineOffsets, offset)
|
||||
}
|
||||
|
||||
offset++
|
||||
}
|
||||
|
||||
p := goldmark.DefaultParser()
|
||||
|
||||
// We need goldmark to parse tables, otherwise they show up as
|
||||
// paragraphs. Since tables often have long lines and we check for long
|
||||
// lines in paragraphs, without table parsing there are false positive long
|
||||
// lines in tables.
|
||||
//
|
||||
// The tableParagraphTransformer is an extension and not part of the default
|
||||
// parser so we add it. There may be an easier way to do it, but this works:
|
||||
p.AddOptions(
|
||||
parser.WithParagraphTransformers(
|
||||
util.Prioritized(extension.NewTableParagraphTransformer(), 99),
|
||||
),
|
||||
)
|
||||
|
||||
r := text.NewReader(md)
|
||||
root := p.Parse(r)
|
||||
|
||||
rules := rules[pluginType]
|
||||
|
||||
tester := T{
|
||||
filename: filename,
|
||||
markdown: md,
|
||||
newlineOffsets: newlineOffsets,
|
||||
sourceFlag: sourceFlag,
|
||||
pluginType: pluginType,
|
||||
}
|
||||
for _, rule := range rules {
|
||||
err = rule(&tester, root)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
}
|
||||
if !quiet {
|
||||
tester.printPassFail()
|
||||
}
|
||||
|
||||
return tester.pass(), nil
|
||||
}
|
36
tools/readme_linter/plugin.go
Normal file
36
tools/readme_linter/plugin.go
Normal file
|
@ -0,0 +1,36 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"path/filepath"
|
||||
)
|
||||
|
||||
// plugin enumerates the plugin categories a README can document.
type plugin int

// Plugin categories, derived from the README's directory (see
// guessPluginType). pluginNone marks READMEs that are not plugin docs.
const (
	pluginNone plugin = iota
	pluginInput
	pluginOutput
	pluginProcessor
	pluginAggregator
	pluginParser
)
|
||||
|
||||
func guessPluginType(filename string) plugin {
|
||||
// Switch takes `plugins/inputs/amd_rocm_smi/README.md` and converts it to
|
||||
// `plugins/inputs`. This avoids parsing READMEs that are under a plugin
|
||||
// like those found in test folders as actual plugin readmes.
|
||||
switch filepath.Dir(filepath.Dir(filename)) {
|
||||
case "plugins/inputs":
|
||||
return pluginInput
|
||||
case "plugins/outputs":
|
||||
return pluginOutput
|
||||
case "plugins/processors":
|
||||
return pluginProcessor
|
||||
case "plugins/aggregators":
|
||||
return pluginAggregator
|
||||
case "plugins/parsers":
|
||||
return pluginParser
|
||||
default:
|
||||
return pluginNone
|
||||
}
|
||||
}
|
456
tools/readme_linter/rules.go
Normal file
456
tools/readme_linter/rules.go
Normal file
|
@ -0,0 +1,456 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"regexp"
|
||||
"slices"
|
||||
"strings"
|
||||
|
||||
"github.com/yuin/goldmark/ast"
|
||||
)
|
||||
|
||||
var (
	// Setup regular expression for checking versions and valid choices

	// metaComment matches HTML comments (e.g., <!-- some comment -->)
	// surrounded by optional whitespace; used to strip trailing comments
	// from tag and operating-system entries before validating them.
	metaComment = regexp.MustCompile(`(?:\s*<!-- .* -->\s*)`)

	// metaVersion matches the Telegraf versioning format
	// (e.g., "Telegraf v1.2.3"), optionally followed by an HTML comment.
	metaVersion = regexp.MustCompile(`^Telegraf v\d+\.\d+\.\d+(?:\s+<!-- .* -->\s*)?$`)

	// metaTags lists the valid tags per plugin type. Plugin types without
	// an entry here (e.g. parsers) must not carry a tags metadata line;
	// the metadata rule flags them.
	metaTags = map[plugin][]string{
		pluginInput: {
			"applications",
			"cloud",
			"containers",
			"datastore",
			"hardware",
			"iot",
			"logging",
			"messaging",
			"network",
			"server",
			"system",
			"testing",
			"web",
		},
		pluginOutput: {
			"applications",
			"cloud",
			"containers",
			"datastore",
			"hardware",
			"iot",
			"logging",
			"messaging",
			"network",
			"server",
			"system",
			"testing",
			"web",
		},
		pluginAggregator: {
			"math",
			"sampling",
			"statistics",
			"transformation",
		},
		pluginProcessor: {
			"math",
			"sampling",
			"statistics",
			"transformation",
		},
	}

	// metaOSes lists the valid operating-system identifiers for the 💻 line.
	metaOSes = []string{
		"all",
		"freebsd",
		"linux",
		"macos",
		"solaris",
		"windows",
	}

	// metaOrder is the required order of metadata entries in a README.
	// NOTE(review): treat this as read-only; the metadata rule must copy it
	// before removing optional entries.
	metaOrder = []string{
		"introduction version",
		"deprecation version",
		"removal version",
		"tags",
		"operating systems",
	}
)
|
||||
|
||||
// The first section is a heading with plugin name and paragraph short
|
||||
// description
|
||||
func firstSection(t *T, root ast.Node) error {
|
||||
var n ast.Node
|
||||
n = root.FirstChild()
|
||||
|
||||
// Ignore HTML comments such as linter ignore sections
|
||||
for {
|
||||
if n == nil {
|
||||
break
|
||||
}
|
||||
if _, ok := n.(*ast.HTMLBlock); !ok {
|
||||
break
|
||||
}
|
||||
n = n.NextSibling()
|
||||
}
|
||||
|
||||
t.assertKind(ast.KindHeading, n)
|
||||
t.assertHeadingLevel(1, n)
|
||||
t.assertFirstChildRegexp(` Plugin$`, n)
|
||||
|
||||
// Make sure there is some text after the heading
|
||||
n = n.NextSibling()
|
||||
t.assertKind(ast.KindParagraph, n)
|
||||
length := len(n.(*ast.Paragraph).Lines().Value(t.markdown))
|
||||
if length < 30 {
|
||||
t.assertNodef(n, "short first section. Please add short description of plugin. length %d, minimum 30", length)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Somewhere there should be a heading "sample configuration" and a
|
||||
// toml code block. The toml should match what is in the plugin's go
|
||||
// code
|
||||
|
||||
// Second level headings should include
|
||||
func requiredSections(t *T, root ast.Node, headings []string) error {
|
||||
headingsSet := newSet(headings)
|
||||
|
||||
expectedLevel := 2
|
||||
|
||||
titleCounts := make(map[string]int)
|
||||
|
||||
for n := root.FirstChild(); n != nil; n = n.NextSibling() {
|
||||
var h *ast.Heading
|
||||
var ok bool
|
||||
if h, ok = n.(*ast.Heading); !ok {
|
||||
continue
|
||||
}
|
||||
|
||||
child := h.FirstChild()
|
||||
if child == nil {
|
||||
continue
|
||||
}
|
||||
//nolint:staticcheck // need to use this since we aren't sure the type
|
||||
title := strings.TrimSpace(string(child.Text(t.markdown)))
|
||||
if headingsSet.has(title) && h.Level != expectedLevel {
|
||||
t.assertNodef(n, "has required section %q but wrong heading level. Expected level %d, found %d",
|
||||
title, expectedLevel, h.Level)
|
||||
}
|
||||
|
||||
titleCounts[title]++
|
||||
}
|
||||
|
||||
headingsSet.forEach(func(title string) {
|
||||
if _, exists := titleCounts[title]; !exists {
|
||||
t.assertf("missing required section %q", title)
|
||||
}
|
||||
})
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Use this to make a rule that looks for a list of settings. (this is
// a closure of func requiredSections)
//
// requiredSectionsClose binds a list of required section headings into a
// ruleFunc, so per-plugin-type rule tables can be built in init.
func requiredSectionsClose(headings []string) func(*T, ast.Node) error {
	return func(t *T, root ast.Node) error {
		return requiredSections(t, root, headings)
	}
}
|
||||
|
||||
func noLongLinesInParagraphs(threshold int) func(*T, ast.Node) error {
|
||||
return func(t *T, root ast.Node) error {
|
||||
// We're looking for long lines in paragraphs. Find paragraphs
|
||||
// first, then which lines are in paragraphs
|
||||
paraLines := make([]int, 0)
|
||||
for n := root.FirstChild(); n != nil; n = n.NextSibling() {
|
||||
var p *ast.Paragraph
|
||||
var ok bool
|
||||
if p, ok = n.(*ast.Paragraph); !ok {
|
||||
continue // only looking for paragraphs
|
||||
}
|
||||
|
||||
segs := p.Lines()
|
||||
for _, seg := range segs.Sliced(0, segs.Len()) {
|
||||
line := t.line(seg.Start)
|
||||
paraLines = append(paraLines, line)
|
||||
}
|
||||
}
|
||||
|
||||
// Find long lines in the whole file
|
||||
longLines := make([]int, 0, len(t.newlineOffsets))
|
||||
last := 0
|
||||
for i, cur := range t.newlineOffsets {
|
||||
length := cur - last - 1 // -1 to exclude the newline
|
||||
if length > threshold {
|
||||
longLines = append(longLines, i)
|
||||
}
|
||||
last = cur
|
||||
}
|
||||
|
||||
// Merge both lists
|
||||
p := 0
|
||||
l := 0
|
||||
bads := make([]int, 0, max(len(paraLines), len(longLines)))
|
||||
for p < len(paraLines) && l < len(longLines) {
|
||||
long := longLines[l]
|
||||
para := paraLines[p]
|
||||
switch {
|
||||
case long == para:
|
||||
bads = append(bads, long)
|
||||
p++
|
||||
l++
|
||||
case long < para:
|
||||
l++
|
||||
case long > para:
|
||||
p++
|
||||
}
|
||||
}
|
||||
|
||||
for _, bad := range bads {
|
||||
t.assertLinef(bad, "long line in paragraph")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
func configSection(t *T, root ast.Node) error {
|
||||
var config *ast.Heading
|
||||
config = nil
|
||||
expectedTitle := "Configuration"
|
||||
for n := root.FirstChild(); n != nil; n = n.NextSibling() {
|
||||
var h *ast.Heading
|
||||
var ok bool
|
||||
if h, ok = n.(*ast.Heading); !ok {
|
||||
continue
|
||||
}
|
||||
|
||||
//nolint:staticcheck // need to use this since we aren't sure the type
|
||||
title := string(h.FirstChild().Text(t.markdown))
|
||||
if title == expectedTitle {
|
||||
config = h
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
if config == nil {
|
||||
t.assertf("missing required section %q", expectedTitle)
|
||||
return nil
|
||||
}
|
||||
|
||||
toml := config.NextSibling()
|
||||
if toml == nil {
|
||||
t.assertNodef(toml, "missing config next sibling")
|
||||
return nil
|
||||
}
|
||||
|
||||
var b *ast.FencedCodeBlock
|
||||
var ok bool
|
||||
if b, ok = toml.(*ast.FencedCodeBlock); !ok {
|
||||
t.assertNodef(toml, "config next sibling isn't a fenced code block")
|
||||
return nil
|
||||
}
|
||||
|
||||
if !bytes.Equal(b.Language(t.markdown), []byte("toml")) {
|
||||
t.assertNodef(b, "config fenced code block isn't toml language")
|
||||
return nil
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Links from one markdown file to another in the repo should be relative
|
||||
func relativeTelegrafLinks(t *T, root ast.Node) error {
|
||||
for n := root.FirstChild(); n != nil; n = n.NextSibling() {
|
||||
if _, ok := n.(*ast.Paragraph); !ok {
|
||||
continue
|
||||
}
|
||||
|
||||
for n2 := n.FirstChild(); n2 != nil; n2 = n2.NextSibling() {
|
||||
var l *ast.Link
|
||||
var ok bool
|
||||
if l, ok = n2.(*ast.Link); !ok {
|
||||
continue
|
||||
}
|
||||
link := string(l.Destination)
|
||||
if strings.HasPrefix(link, "https://github.com/influxdata/telegraf/blob") {
|
||||
t.assertNodef(n, "in-repo link must be relative: %s", link)
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Each plugin should have metadata for documentation generation
|
||||
func metadata(t *T, root ast.Node) error {
|
||||
const icons string = "⭐🚩🔥🏷️💻"
|
||||
|
||||
n := root.FirstChild()
|
||||
if n == nil {
|
||||
t.assertf("no metadata section found")
|
||||
return nil
|
||||
}
|
||||
|
||||
// Advance to the first heading which should be the plugin header
|
||||
for n != nil {
|
||||
if _, ok := n.(*ast.Heading); ok {
|
||||
t.assertHeadingLevel(1, n)
|
||||
break
|
||||
}
|
||||
n = n.NextSibling()
|
||||
}
|
||||
|
||||
// Get the description text and check for metadata
|
||||
positions := make([]string, 0, 5)
|
||||
for n != nil {
|
||||
n = n.NextSibling()
|
||||
|
||||
// The next heading will end the initial section
|
||||
if _, ok := n.(*ast.Heading); ok {
|
||||
break
|
||||
}
|
||||
|
||||
// Ignore everything that is not text
|
||||
para, ok := n.(*ast.Paragraph)
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
|
||||
// Metadata should be separate paragraph with the items ordered.
|
||||
var inMetadata bool
|
||||
var counter int
|
||||
scanner := bufio.NewScanner(bytes.NewBuffer(para.Lines().Value(t.markdown)))
|
||||
for scanner.Scan() {
|
||||
txt := scanner.Text()
|
||||
if counter == 0 {
|
||||
inMetadata = strings.ContainsAny(txt, icons)
|
||||
}
|
||||
counter++
|
||||
|
||||
// If we are not in a metadata section, we need to make sure we don't
|
||||
// see any metadata in this text.
|
||||
if !inMetadata {
|
||||
if strings.ContainsAny(txt, icons) {
|
||||
t.assertNodeLineOffsetf(n, counter-1, "metadata found in section not surrounded by empty lines")
|
||||
return nil
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
icon, remainder, found := strings.Cut(txt, " ")
|
||||
if !found || !strings.Contains(icons, icon) {
|
||||
t.assertNodeLineOffsetf(n, counter-1, "metadata line must start with a valid icon and a space")
|
||||
continue
|
||||
}
|
||||
if strings.ContainsAny(remainder, icons) {
|
||||
t.assertNodeLineOffsetf(n, counter-1, "each metadata entry must be on a separate line")
|
||||
continue
|
||||
}
|
||||
|
||||
// We are in a metadata section, so test for the correct structure
|
||||
switch icon {
|
||||
case "⭐":
|
||||
if !metaVersion.MatchString(remainder) {
|
||||
t.assertNodeLineOffsetf(n, counter-1, "invalid introduction version format; has to be 'Telegraf vX.Y.Z'")
|
||||
}
|
||||
positions = append(positions, "introduction version")
|
||||
case "🚩":
|
||||
if !metaVersion.MatchString(remainder) {
|
||||
t.assertNodeLineOffsetf(n, counter-1, "invalid deprecation version format; has to be 'Telegraf vX.Y.Z'")
|
||||
}
|
||||
positions = append(positions, "deprecation version")
|
||||
case "🔥":
|
||||
if !metaVersion.MatchString(remainder) {
|
||||
t.assertNodeLineOffsetf(n, counter-1, "invalid removal version format; has to be 'Telegraf vX.Y.Z'")
|
||||
}
|
||||
positions = append(positions, "removal version")
|
||||
case "🏷️":
|
||||
validTags, found := metaTags[t.pluginType]
|
||||
if !found {
|
||||
t.assertNodeLineOffsetf(n, counter-1, "no tags expected for plugin type")
|
||||
continue
|
||||
}
|
||||
|
||||
for _, tag := range strings.Split(remainder, ",") {
|
||||
tag = metaComment.ReplaceAllString(tag, "")
|
||||
if !slices.Contains(validTags, strings.TrimSpace(tag)) {
|
||||
t.assertNodeLineOffsetf(n, counter-1, "unknown tag %q", tag)
|
||||
}
|
||||
}
|
||||
positions = append(positions, "tags")
|
||||
case "💻":
|
||||
for _, os := range strings.Split(remainder, ",") {
|
||||
os = metaComment.ReplaceAllString(os, "")
|
||||
if !slices.Contains(metaOSes, strings.TrimSpace(os)) {
|
||||
t.assertNodeLineOffsetf(n, counter-1, "unknown operating system %q", os)
|
||||
}
|
||||
}
|
||||
positions = append(positions, "operating systems")
|
||||
default:
|
||||
t.assertNodeLineOffsetf(n, counter-1, "invalid metadata icon")
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if len(positions) == 0 {
|
||||
t.assertf("metadata is missing")
|
||||
return nil
|
||||
}
|
||||
|
||||
// Check for duplicate entries
|
||||
seen := make(map[string]bool)
|
||||
for _, p := range positions {
|
||||
if seen[p] {
|
||||
t.assertNodef(n, "duplicate metadata entry for %q", p)
|
||||
return nil
|
||||
}
|
||||
seen[p] = true
|
||||
}
|
||||
|
||||
// Remove the optional entries from the checklist
|
||||
validOrder := append(make([]string, 0, len(metaOrder)), metaOrder...)
|
||||
if !slices.Contains(positions, "deprecation version") && !slices.Contains(positions, "removal version") {
|
||||
idx := slices.Index(validOrder, "deprecation version")
|
||||
validOrder = slices.Delete(validOrder, idx, idx+1)
|
||||
idx = slices.Index(validOrder, "removal version")
|
||||
validOrder = slices.Delete(validOrder, idx, idx+1)
|
||||
}
|
||||
if _, found := metaTags[t.pluginType]; !found {
|
||||
idx := slices.Index(metaOrder, "tags")
|
||||
metaOrder = slices.Delete(metaOrder, idx, idx+1)
|
||||
}
|
||||
|
||||
// Check the order of the metadata entries and required entries
|
||||
if len(validOrder) != len(positions) {
|
||||
for _, v := range validOrder {
|
||||
if !slices.Contains(positions, v) {
|
||||
t.assertNodef(n, "metadata entry for %q is missing", v)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
for i, v := range validOrder {
|
||||
if v != positions[i] {
|
||||
if i == 0 {
|
||||
t.assertNodef(n, "%q has to be the first entry", v)
|
||||
} else {
|
||||
t.assertNodef(n, "%q has to follow %q", v, validOrder[i-1])
|
||||
}
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// To do: Check markdown files that aren't plugin readme files for paragraphs
|
||||
// with long lines
|
||||
|
||||
// To do: Check the toml inside the configuration section for syntax errors
|
32
tools/readme_linter/set.go
Normal file
32
tools/readme_linter/set.go
Normal file
|
@ -0,0 +1,32 @@
|
|||
package main
|
||||
|
||||
// set is a simple string set backed by a map with empty-struct values.
type set struct {
	m map[string]struct{}
}

// add inserts key into the set; adding an existing key is a no-op.
func (s *set) add(key string) {
	s.m[key] = struct{}{}
}

// has reports whether key is a member of the set.
func (s *set) has(key string) bool {
	_, ok := s.m[key]
	return ok
}

// forEach calls f once for every member, in unspecified order.
func (s *set) forEach(f func(string)) {
	for key := range s.m {
		f(key)
	}
}

// newSet builds a set containing the given elements.
func newSet(elems []string) *set {
	s := &set{m: make(map[string]struct{}, len(elems))}
	for _, elem := range elems {
		s.add(elem)
	}
	return s
}
|
Loading…
Add table
Add a link
Reference in a new issue