chore: Fix typos throughout codebase (#15338)
szepeviktor authored May 31, 2024
1 parent 536b3d0 commit 31a1d34
Showing 54 changed files with 150 additions and 150 deletions.
4 changes: 2 additions & 2 deletions CHANGELOG-1.13.md
@@ -622,7 +622,7 @@

- [#5261](https://github.com/influxdata/telegraf/pull/5261): Fix arithmetic overflow in sqlserver input.
- [#5194](https://github.com/influxdata/telegraf/issues/5194): Fix latest metrics not sent first when output fails.
-- [#5285](https://github.com/influxdata/telegraf/issues/5285): Fix amqp_consumer stops consuming when it receives unparseable messages.
+- [#5285](https://github.com/influxdata/telegraf/issues/5285): Fix amqp_consumer stops consuming when it receives unparsable messages.
- [#5281](https://github.com/influxdata/telegraf/issues/5281): Fix prometheus input not detecting added and removed pods.
- [#5215](https://github.com/influxdata/telegraf/issues/5215): Remove userinfo from cluster tag in couchbase.
- [#5298](https://github.com/influxdata/telegraf/issues/5298): Fix internal_write buffer_size not reset on timed writes.
@@ -917,7 +917,7 @@
- [#4498](https://github.com/influxdata/telegraf/pull/4498): Keep leading whitespace for messages in syslog input.
- [#4470](https://github.com/influxdata/telegraf/issues/4470): Skip bad entries on interrupt input.
- [#4501](https://github.com/influxdata/telegraf/issues/4501): Preserve metric type when using filters in output plugins.
-- [#3794](https://github.com/influxdata/telegraf/issues/3794): Fix error message if URL is unparseable in influxdb output.
+- [#3794](https://github.com/influxdata/telegraf/issues/3794): Fix error message if URL is unparsable in influxdb output.
- [#4059](https://github.com/influxdata/telegraf/issues/4059): Use explicit zpool properties to fix parse error on FreeBSD 11.2.
- [#4514](https://github.com/influxdata/telegraf/pull/4514): Lock buffer when adding metrics.

20 changes: 10 additions & 10 deletions config/config_test.go
@@ -471,7 +471,7 @@ func TestConfig_InlineTables(t *testing.T) {
require.NoError(t, c.LoadConfig("./testdata/inline_table.toml"))
require.Len(t, c.Outputs, 2)

-output, ok := c.Outputs[1].Output.(*MockupOuputPlugin)
+output, ok := c.Outputs[1].Output.(*MockupOutputPlugin)
require.True(t, ok)
require.Equal(t, map[string]string{"Authorization": "Token test", "Content-Type": "application/json"}, output.Headers)
require.Equal(t, []string{"org_id"}, c.Outputs[0].Config.Filter.TagInclude)
@@ -484,7 +484,7 @@ func TestConfig_SliceComment(t *testing.T) {
require.NoError(t, c.LoadConfig("./testdata/slice_comment.toml"))
require.Len(t, c.Outputs, 1)

-output, ok := c.Outputs[0].Output.(*MockupOuputPlugin)
+output, ok := c.Outputs[0].Output.(*MockupOutputPlugin)
require.True(t, ok)
require.Equal(t, []string{"test"}, output.Scopes)
}
@@ -510,7 +510,7 @@ func TestConfig_AzureMonitorNamespacePrefix(t *testing.T) {

expectedPrefix := []string{"Telegraf/", ""}
for i, plugin := range c.Outputs {
-output, ok := plugin.Output.(*MockupOuputPlugin)
+output, ok := plugin.Output.(*MockupOutputPlugin)
require.True(t, ok)
require.Equal(t, expectedPrefix[i], output.NamespacePrefix)
}
@@ -1453,7 +1453,7 @@ func (m *MockupProcessorPluginParserFunc) SetParserFunc(pf telegraf.ParserFunc)
}

/*** Mockup OUTPUT plugin for testing to avoid cyclic dependencies ***/
-type MockupOuputPlugin struct {
+type MockupOutputPlugin struct {
URL string `toml:"url"`
Headers map[string]string `toml:"headers"`
Scopes []string `toml:"scopes"`
@@ -1462,16 +1462,16 @@ type MockupOuputPlugin struct {
tls.ClientConfig
}

-func (m *MockupOuputPlugin) Connect() error {
+func (m *MockupOutputPlugin) Connect() error {
return nil
}
-func (m *MockupOuputPlugin) Close() error {
+func (m *MockupOutputPlugin) Close() error {
return nil
}
-func (m *MockupOuputPlugin) SampleConfig() string {
+func (m *MockupOutputPlugin) SampleConfig() string {
return "Mockup test output plugin"
}
-func (m *MockupOuputPlugin) Write(_ []telegraf.Metric) error {
+func (m *MockupOutputPlugin) Write(_ []telegraf.Metric) error {
return nil
}

@@ -1624,10 +1624,10 @@ func init() {

// Register the mockup output plugin for the required names
outputs.Add("azure_monitor", func() telegraf.Output {
-return &MockupOuputPlugin{NamespacePrefix: "Telegraf/"}
+return &MockupOutputPlugin{NamespacePrefix: "Telegraf/"}
})
outputs.Add("http", func() telegraf.Output {
-return &MockupOuputPlugin{}
+return &MockupOutputPlugin{}
})
outputs.Add("serializer_test_new", func() telegraf.Output {
return &MockupOutputPluginSerializerNew{}
8 changes: 4 additions & 4 deletions config/plugin_id.go
@@ -32,19 +32,19 @@ func processTable(parent string, table *ast.Table) ([]keyValuePair, error) {
})
case *ast.Table:
key := prefix + k
-childs, err := processTable(key, v)
+children, err := processTable(key, v)
if err != nil {
return nil, fmt.Errorf("parsing table for %q failed: %w", key, err)
}
-options = append(options, childs...)
+options = append(options, children...)
case []*ast.Table:
for i, t := range v {
key := fmt.Sprintf("%s#%d.%s", prefix, i, k)
-childs, err := processTable(key, t)
+children, err := processTable(key, t)
if err != nil {
return nil, fmt.Errorf("parsing table for %q #%d failed: %w", key, i, err)
}
-options = append(options, childs...)
+options = append(options, children...)
}
default:
return nil, fmt.Errorf("unknown node type %T in key %q", value, prefix+k)
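The `processTable` helper above recursively flattens a nested TOML configuration into flat key/value pairs: nesting levels are joined into longer keys, and elements of table arrays are numbered with `#<index>`. A minimal self-contained sketch of the same idea, using plain Go maps instead of Telegraf's `ast.Table` and a simplified key layout (the names and layout here are illustrative, not Telegraf's):

```go
package main

import (
	"fmt"
	"sort"
)

type keyValuePair struct {
	Key   string
	Value interface{}
}

// flatten emits one pair per leaf value, joining nesting levels with "."
// and numbering the elements of a slice of tables with "#<index>".
func flatten(prefix string, table map[string]interface{}) []keyValuePair {
	keys := make([]string, 0, len(table))
	for k := range table {
		keys = append(keys, k)
	}
	sort.Strings(keys) // deterministic output for the example

	var options []keyValuePair
	for _, k := range keys {
		key := prefix + k
		switch v := table[k].(type) {
		case map[string]interface{}:
			options = append(options, flatten(key+".", v)...)
		case []map[string]interface{}:
			for i, t := range v {
				options = append(options, flatten(fmt.Sprintf("%s#%d.", key, i), t)...)
			}
		default:
			options = append(options, keyValuePair{Key: key, Value: v})
		}
	}
	return options
}

func main() {
	cfg := map[string]interface{}{
		"url":     "http://localhost:8080",
		"headers": map[string]interface{}{"Content-Type": "application/json"},
		"tagpass": []map[string]interface{}{{"cpu": "cpu0"}},
	}
	for _, kv := range flatten("", cfg) {
		fmt.Printf("%s = %v\n", kv.Key, kv.Value)
	}
}
```

Running this prints one flattened line per leaf, e.g. `tagpass#0.cpu = cpu0` — the kind of stable option list that a plugin ID can then be derived from.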
2 changes: 1 addition & 1 deletion docs/developers/REVIEWS.md
@@ -70,7 +70,7 @@ In case you still want to continue with the PR, feel free to reopen it.

## Linting

-Each pull request will have the appropriate linters checking the files for any common mistakes. The github action Super Linter is used: [super-pinter](https://github.com/github/super-linter). If it is failing you can click on the action and read the logs to figure out the issue. You can also run the github action locally by following these instructions: [run-linter-locally.md](https://github.com/github/super-linter/blob/main/docs/run-linter-locally.md). You can find more information on each of the linters in the super linter readme.
+Each pull request will have the appropriate linters checking the files for any common mistakes. The github action Super Linter is used: [super-linter](https://github.com/github/super-linter). If it is failing you can click on the action and read the logs to figure out the issue. You can also run the github action locally by following these instructions: [run-linter-locally.md](https://github.com/github/super-linter/blob/main/docs/run-linter-locally.md). You can find more information on each of the linters in the super linter readme.

## Testing

4 changes: 2 additions & 2 deletions plugins/common/kafka/sasl.go
@@ -11,7 +11,7 @@ import (
type SASLAuth struct {
SASLUsername config.Secret `toml:"sasl_username"`
SASLPassword config.Secret `toml:"sasl_password"`
-SASLExtentions map[string]string `toml:"sasl_extensions"`
+SASLExtensions map[string]string `toml:"sasl_extensions"`
SASLMechanism string `toml:"sasl_mechanism"`
SASLVersion *int `toml:"sasl_version"`

@@ -92,7 +92,7 @@ func (k *SASLAuth) Token() (*sarama.AccessToken, error) {
defer token.Destroy()
return &sarama.AccessToken{
Token: token.String(),
-Extensions: k.SASLExtentions,
+Extensions: k.SASLExtensions,
}, nil
}

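Note that the `SASLExtentions` → `SASLExtensions` rename is purely a Go-identifier fix: the configuration key stays `sasl_extensions`, because TOML decoding matches config keys against the struct tag rather than the field name, so existing configurations keep working. A small stand-alone check illustrating this (a trimmed stand-in struct, not Telegraf's actual type):

```go
package main

import (
	"fmt"
	"reflect"
)

// A trimmed stand-in for the SASLAuth struct above; illustrative only.
type saslAuth struct {
	SASLExtensions map[string]string `toml:"sasl_extensions"`
	SASLMechanism  string            `toml:"sasl_mechanism"`
}

func main() {
	t := reflect.TypeOf(saslAuth{})
	for i := 0; i < t.NumField(); i++ {
		f := t.Field(i)
		// TOML decoders match configuration keys against the struct tag,
		// so renaming the Go field leaves the accepted config unchanged.
		fmt.Printf("Go field %s decodes from config key %q\n", f.Name, f.Tag.Get("toml"))
	}
}
```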
8 changes: 4 additions & 4 deletions plugins/inputs/aliyuncms/aliyuncms.go
@@ -374,8 +374,8 @@ func parseTag(tagSpec string, data interface{}) (tagKey string, tagValue string,

func (s *AliyunCMS) prepareTagsAndDimensions(metric *Metric) {
var (
-newData bool
-defaulTags = []string{"RegionId:RegionId"}
+newData bool
+defaultTags = []string{"RegionId:RegionId"}
)

if s.dt == nil { //Discovery is not activated
@@ -411,7 +411,7 @@ L:
//Start filing tags
//Remove old value if exist
delete(metric.discoveryTags, instanceID)
-metric.discoveryTags[instanceID] = make(map[string]string, len(metric.TagsQueryPath)+len(defaulTags))
+metric.discoveryTags[instanceID] = make(map[string]string, len(metric.TagsQueryPath)+len(defaultTags))

for _, tagQueryPath := range metric.TagsQueryPath {
tagKey, tagValue, err := parseTag(tagQueryPath, elem)
@@ -428,7 +428,7 @@ L:
}

//Adding default tags if not already there
-for _, defaultTagQP := range defaulTags {
+for _, defaultTagQP := range defaultTags {
tagKey, tagValue, err := parseTag(defaultTagQP, elem)

if err != nil {
18 changes: 9 additions & 9 deletions plugins/inputs/aliyuncms/aliyuncms_test.go
@@ -391,10 +391,10 @@ func TestGather(t *testing.T) {

//test table:
tests := []struct {
-name string
-hasMeasurment bool
-metricNames []string
-expected []telegraf.Metric
+name string
+hasMeasurement bool
+metricNames []string
+expected []telegraf.Metric
}{
{
name: "Empty data point",
@@ -408,9 +408,9 @@
},
},
{
name: "Data point with fields & tags",
hasMeasurment: true,
metricNames: []string{"InstanceActiveConnection"},
name: "Data point with fields & tags",
hasMeasurement: true,
metricNames: []string{"InstanceActiveConnection"},
expected: []telegraf.Metric{
testutil.MustMetric(
"aliyuncms_acs_slb_dashboard",
@@ -434,8 +434,8 @@
var acc testutil.Accumulator
plugin.Metrics[0].MetricNames = tt.metricNames
require.Empty(t, acc.GatherError(plugin.Gather))
-require.Equal(t, acc.HasMeasurement("aliyuncms_acs_slb_dashboard"), tt.hasMeasurment)
-if tt.hasMeasurment {
+require.Equal(t, acc.HasMeasurement("aliyuncms_acs_slb_dashboard"), tt.hasMeasurement)
+if tt.hasMeasurement {
acc.AssertContainsTaggedFields(t, "aliyuncms_acs_slb_dashboard", tt.expected[0].Fields(), tt.expected[0].Tags())
}
})
2 changes: 1 addition & 1 deletion plugins/inputs/amd_rocm_smi/README.md
@@ -90,6 +90,6 @@ of versions and small set of GPUs. Currently the latest ROCm version tested is
information provided by `rocm-smi` can vary so that some fields would start/stop
appearing in the metrics upon updates. The `rocm-smi` JSON output is not
perfectly homogeneous and is possibly changing in the future, hence parsing and
-unmarshaling can start failing upon updating ROCm.
+unmarshalling can start failing upon updating ROCm.

Inspired by the current state of the art of the `nvidia-smi` plugin.
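Given the drift this README describes, one defensive way to consume such output is to decode into a generic map and read fields only when they are present, rather than unmarshalling into a fixed struct. A sketch of that approach — the payload shapes and field names below are invented for illustration:

```go
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Two made-up payload shapes: the second drops one field and adds
	// another, mimicking the version-to-version drift described above.
	payloads := []string{
		`{"card0": {"Temperature (C)": "41.0", "GPU use (%)": "3"}}`,
		`{"card0": {"GPU use (%)": "5", "Average Power (W)": "19.0"}}`,
	}

	for _, p := range payloads {
		var data map[string]map[string]interface{}
		if err := json.Unmarshal([]byte(p), &data); err != nil {
			fmt.Println("skipping unparsable payload:", err)
			continue
		}
		for card, fields := range data {
			// Read fields only when present instead of assuming a schema.
			if v, ok := fields["GPU use (%)"]; ok {
				fmt.Printf("%s gpu use: %v\n", card, v)
			}
		}
	}
}
```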
16 changes: 8 additions & 8 deletions plugins/inputs/amqp_consumer/amqp_consumer_test.go
@@ -122,11 +122,11 @@ func TestIntegration(t *testing.T) {
"test,source=B value=1i 1712780301000000100",
"test,source=C value=2i 1712780301000000200",
}
-expexted := make([]telegraf.Metric, 0, len(metrics))
+expected := make([]telegraf.Metric, 0, len(metrics))
for _, x := range metrics {
m, err := parser.Parse([]byte(x))
require.NoError(t, err)
-expexted = append(expexted, m...)
+expected = append(expected, m...)
}

// Start the plugin
@@ -141,12 +141,12 @@ func TestIntegration(t *testing.T) {

// Verify that the metrics were actually written
require.Eventually(t, func() bool {
-return acc.NMetrics() >= uint64(len(expexted))
+return acc.NMetrics() >= uint64(len(expected))
}, 3*time.Second, 100*time.Millisecond)

client.close()
plugin.Stop()
-testutil.RequireMetricsEqual(t, expexted, acc.GetTelegrafMetrics())
+testutil.RequireMetricsEqual(t, expected, acc.GetTelegrafMetrics())
}

func TestStartupErrorBehaviorError(t *testing.T) {
@@ -341,11 +341,11 @@ func TestStartupErrorBehaviorRetry(t *testing.T) {
"test,source=B value=1i 1712780301000000100",
"test,source=C value=2i 1712780301000000200",
}
-expexted := make([]telegraf.Metric, 0, len(metrics))
+expected := make([]telegraf.Metric, 0, len(metrics))
for _, x := range metrics {
m, err := parser.Parse([]byte(x))
require.NoError(t, err)
-expexted = append(expexted, m...)
+expected = append(expected, m...)
}

// Starting the plugin should succeed as we will retry to startup later
@@ -374,12 +374,12 @@

// Verify that the metrics were actually collected
require.Eventually(t, func() bool {
-return acc.NMetrics() >= uint64(len(expexted))
+return acc.NMetrics() >= uint64(len(expected))
}, 3*time.Second, 100*time.Millisecond)

client.close()
plugin.Stop()
-testutil.RequireMetricsEqual(t, expexted, acc.GetTelegrafMetrics())
+testutil.RequireMetricsEqual(t, expected, acc.GetTelegrafMetrics())
}

type producer struct {
4 changes: 2 additions & 2 deletions plugins/inputs/aurora/aurora_test.go
@@ -116,7 +116,7 @@ func TestAurora(t *testing.T) {
},
},
{
name: "float64 unparseable",
name: "float64 unparsable",
leaderhealth: func(_ *testing.T, w http.ResponseWriter, _ *http.Request) {
w.WriteHeader(http.StatusOK)
},
@@ -136,7 +136,7 @@ func TestAurora(t *testing.T) {
},
},
{
name: "int64 unparseable",
name: "int64 unparsable",
leaderhealth: func(_ *testing.T, w http.ResponseWriter, _ *http.Request) {
w.WriteHeader(http.StatusOK)
},
2 changes: 1 addition & 1 deletion plugins/inputs/bind/bind_test.go
@@ -617,7 +617,7 @@ func TestBindXmlStatsV3(t *testing.T) {
})
}

-func TestBindUnparseableURL(t *testing.T) {
+func TestBindUnparsableURL(t *testing.T) {
b := Bind{
Urls: []string{"://example.com"},
}
2 changes: 1 addition & 1 deletion plugins/inputs/bond/bond_test.go
@@ -108,7 +108,7 @@ Partner Churned Count: 0
Slave Interface: eth1
MII Status: down
Speed: Unknown
-Duplex: Unkown
+Duplex: Unknown
Link Failure Count: 1
Permanent HW addr: 3c:ec:ef:5e:71:59
Slave queue ID: 0
2 changes: 1 addition & 1 deletion plugins/inputs/ctrlx_datalayer/README.md
@@ -96,7 +96,7 @@ See the [CONFIGURATION.md][CONFIGURATION.md] for more details.

## The interval that defines the fastest rate at which the node values should be sampled and values captured. (default: 1s)
## The sampling frequency should be adjusted to the dynamics of the signal to be sampled.
-## Higher sampling frequence increases load on ctrlX Data Layer.
+## Higher sampling frequencies increases load on ctrlX Data Layer.
## The sampling frequency can be higher, than the publish interval. Captured samples are put in a queue and sent in publish interval.
## Note: The minimum sampling interval can be overruled by a global setting in the ctrlX Data Layer configuration ('datalayer/subscriptions/settings').
# sampling_interval = "1s"
2 changes: 1 addition & 1 deletion plugins/inputs/ctrlx_datalayer/sample.conf
@@ -74,7 +74,7 @@

## The interval that defines the fastest rate at which the node values should be sampled and values captured. (default: 1s)
## The sampling frequency should be adjusted to the dynamics of the signal to be sampled.
-## Higher sampling frequence increases load on ctrlX Data Layer.
+## Higher sampling frequencies increases load on ctrlX Data Layer.
## The sampling frequency can be higher, than the publish interval. Captured samples are put in a queue and sent in publish interval.
## Note: The minimum sampling interval can be overruled by a global setting in the ctrlX Data Layer configuration ('datalayer/subscriptions/settings').
# sampling_interval = "1s"
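The sampling/publish relationship described in both snippets above — capture values at the faster `sampling_interval`, queue them, and flush the queue at the publish interval — can be pictured with a small standalone sketch; the intervals and the value source here are made up for the example:

```go
package main

import (
	"fmt"
	"time"
)

// A rough illustration of the behaviour described above: values are
// captured at the (faster) sampling interval, queued, and the whole
// queue is flushed at the publish interval.
func main() {
	sample := time.NewTicker(100 * time.Millisecond)  // sampling_interval
	publish := time.NewTicker(500 * time.Millisecond) // publish interval
	defer sample.Stop()
	defer publish.Stop()

	var queue []int
	value := 0

	deadline := time.After(1100 * time.Millisecond) // run briefly for the demo
	for {
		select {
		case <-sample.C:
			value++ // stand-in for reading a ctrlX node value
			queue = append(queue, value)
		case <-publish.C:
			fmt.Printf("publishing %d queued samples: %v\n", len(queue), queue)
			queue = queue[:0]
		case <-deadline:
			return
		}
	}
}
```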
6 changes: 3 additions & 3 deletions plugins/inputs/google_cloud_storage/google_cloud_storage.go
@@ -94,7 +94,7 @@ func (gcs *GCS) Gather(acc telegraf.Accumulator) error {

name = attrs.Name

-if !gcs.shoudIgnore(name) {
+if !gcs.shouldIgnore(name) {
if err := gcs.processMeasurementsInObject(name, bucket, acc); err != nil {
gcs.Log.Errorf("Could not process object %q in bucket %q: %v", name, bucketName, err)
acc.AddError(fmt.Errorf("COULD NOT PROCESS OBJECT %q IN BUCKET %q: %w", name, bucketName, err))
@@ -119,7 +119,7 @@ func (gcs *GCS) createQuery() storage.Query {
return storage.Query{Prefix: gcs.Prefix}
}

-func (gcs *GCS) shoudIgnore(name string) bool {
+func (gcs *GCS) shouldIgnore(name string) bool {
return gcs.offSet.OffSet == name || gcs.OffsetKey == name
}

@@ -159,7 +159,7 @@ func (gcs *GCS) reachedThreshlod(processed int) bool {
}

func (gcs *GCS) updateOffset(bucket *storage.BucketHandle, name string) error {
-if gcs.shoudIgnore(name) {
+if gcs.shouldIgnore(name) {
return nil
}

plugins/inputs/google_cloud_storage/google_cloud_storage_test.go
@@ -150,7 +150,7 @@ func TestRunGatherIteratiosnWithLimit(t *testing.T) {
}

func TestRunGatherIterationWithPages(t *testing.T) {
-srv := stateFulGCSServer(t)
+srv := stateFullGCSServer(t)
defer srv.Close()

emulatorSetEnv(t, srv)
@@ -280,7 +280,7 @@ func startMultipleItemGCSServer(t *testing.T) *httptest.Server {
return srv
}

-func stateFulGCSServer(t *testing.T) *httptest.Server {
+func stateFullGCSServer(t *testing.T) *httptest.Server {
srv := httptest.NewServer(http.NotFoundHandler())

firstElement := parseJSONFromFile(t, "testdata/first_file_listing.json")
2 changes: 1 addition & 1 deletion plugins/inputs/mongodb/mongostat.go
@@ -251,7 +251,7 @@ type TransactionStats struct {
TransCheckpoints int64 `bson:"transaction checkpoints"`
}

-// WTConnectionStats stores statistices on wiredTiger connections
+// WTConnectionStats stores statistics on wiredTiger connections
type WTConnectionStats struct {
FilesCurrentlyOpen int64 `bson:"files currently open"`
}
2 changes: 1 addition & 1 deletion plugins/inputs/mysql/v2/convert_test.go
@@ -47,7 +47,7 @@ func TestConvertGlobalStatus(t *testing.T) {
}
}

-func TestCovertGlobalVariables(t *testing.T) {
+func TestConvertGlobalVariables(t *testing.T) {
tests := []struct {
name string
key string
2 changes: 1 addition & 1 deletion plugins/inputs/phpfpm/phpfpm_test.go
@@ -188,7 +188,7 @@ func TestPhpFpmTimeout_From_Fcgi(t *testing.T) {
}

// TestPhpFpmCrashWithTimeout_From_Fcgi show issue #15175: when timeout is enabled
-// and nothing is listenning on specified port, a nil pointer was dereferenced.
+// and nothing is listening on specified port, a nil pointer was dereferenced.
func TestPhpFpmCrashWithTimeout_From_Fcgi(t *testing.T) {
tcp, err := net.Listen("tcp", "127.0.0.1:0")
require.NoError(t, err, "Cannot initialize test server")
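The comment fixed above refers to issue #15175, a classic Go pitfall: a dial that times out or is refused returns a nil connection together with an error, and touching the connection before checking the error dereferences nil. A schematic reproduction of the pattern — not the plugin's actual code:

```go
package main

import (
	"fmt"
	"net"
	"time"
)

// If nothing listens on the target port, DialTimeout returns a nil
// connection plus an error; using the connection first would panic.
func main() {
	conn, err := net.DialTimeout("tcp", "127.0.0.1:1", 100*time.Millisecond)
	if err != nil {
		// Check the error first; with the checks in the wrong order,
		// conn.SetDeadline below would dereference the nil conn.
		fmt.Println("dial failed:", err)
		return
	}
	defer conn.Close()
	_ = conn.SetDeadline(time.Now().Add(time.Second))
}
```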