diff --git a/CHANGELOG.md b/CHANGELOG.md index e0ae77f87b0dd..3439e75ce03f7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,18 @@ # Changelog +## Unreleased + +### Important Changes + +- Remove useless, all-zero fields in `inputs.procstat`. Up to now, Telegraf + reports the fields `cpu_time_guest`, `cpu_time_guest_nice`, `cpu_time_idle`, + `cpu_time_irq`, `cpu_time_nice`, `cpu_time_soft_irq` and `cpu_time_steal` + which are never set by the underlying library. As a consequence those fields + were always zero. [#14224](https://github.com/influxdata/telegraf/pull/14224) + removes those useless fields. In case you reference them, please adapt your + queries! + ## v1.28.3 [2023-10-23] ### Bugfixes diff --git a/config/types_test.go b/config/types_test.go index 3a0d352fa1ff3..f66dc9b028b8c 100644 --- a/config/types_test.go +++ b/config/types_test.go @@ -29,7 +29,7 @@ func TestConfigDuration(t *testing.T) { p := c.Processors[0].Processor.(*reverse_dns.ReverseDNS) require.EqualValues(t, p.CacheTTL, 3*time.Hour) require.EqualValues(t, p.LookupTimeout, 17*time.Second) - require.Equal(t, p.MaxParallelLookups, 13) + require.Equal(t, 13, p.MaxParallelLookups) require.True(t, p.Ordered) } diff --git a/docs/LICENSE_OF_DEPENDENCIES.md b/docs/LICENSE_OF_DEPENDENCIES.md index 1b289f69fc918..2f6412802055a 100644 --- a/docs/LICENSE_OF_DEPENDENCIES.md +++ b/docs/LICENSE_OF_DEPENDENCIES.md @@ -96,6 +96,7 @@ following works: - github.com/cloudevents/sdk-go [Apache License 2.0](https://github.com/cloudevents/sdk-go/blob/main/LICENSE) - github.com/compose-spec/compose-go [Apache License 2.0](https://github.com/compose-spec/compose-go/blob/master/LICENSE) - github.com/containerd/containerd [Apache License 2.0](https://github.com/containerd/containerd/blob/master/LICENSE) +- github.com/containerd/log [Apache License 2.0](https://github.com/containerd/log/blob/main/LICENSE) - github.com/coocood/freecache [MIT License](https://github.com/coocood/freecache/blob/master/LICENSE) - github.com/coreos/go-semver [Apache License 2.0](https://github.com/coreos/go-semver/blob/main/LICENSE) - github.com/coreos/go-systemd [Apache License 2.0](https://github.com/coreos/go-systemd/blob/main/LICENSE) diff --git a/go.mod b/go.mod index cf177ccff93a7..2294c507be777 100644 --- a/go.mod +++ b/go.mod @@ -165,7 +165,7 @@ require ( github.com/safchain/ethtool v0.3.0 github.com/santhosh-tekuri/jsonschema/v5 v5.3.1 github.com/sensu/sensu-go/api/core/v2 v2.16.0 - github.com/shirou/gopsutil/v3 v3.23.8 + github.com/shirou/gopsutil/v3 v3.23.9 github.com/showwin/speedtest-go v1.6.6 github.com/signalfx/golib/v3 v3.3.53 github.com/sirupsen/logrus v1.9.3 @@ -175,7 +175,7 @@ require ( github.com/srebhan/protobufquery v0.0.0-20230803132024-ae4c0d878e55 github.com/stretchr/testify v1.8.4 github.com/tbrandon/mbserver v0.0.0-20170611213546-993e1772cc62 - github.com/testcontainers/testcontainers-go v0.25.0 + github.com/testcontainers/testcontainers-go v0.26.0 github.com/thomasklein94/packer-plugin-libvirt v0.5.0 github.com/tidwall/gjson v1.14.4 github.com/tinylib/msgp v1.1.8 @@ -249,7 +249,7 @@ require ( github.com/Masterminds/semver v1.5.0 // indirect github.com/Masterminds/semver/v3 v3.2.0 github.com/Microsoft/go-winio v0.6.1 // indirect - github.com/Microsoft/hcsshim v0.11.0 // indirect + github.com/Microsoft/hcsshim v0.11.1 // indirect github.com/alecthomas/participle v0.4.1 // indirect github.com/andybalholm/brotli v1.0.5 // indirect github.com/antlr/antlr4/runtime/Go/antlr/v4 v4.0.0-20230305170008-8188dc5388df // indirect @@ -284,7 +284,8 @@ 
require ( github.com/cespare/xxhash/v2 v2.2.0 // indirect github.com/cloudevents/sdk-go/v2 v2.14.0 github.com/cloudflare/golz4 v0.0.0-20150217214814-ef862a3cdc58 // indirect - github.com/containerd/containerd v1.7.6 // indirect + github.com/containerd/containerd v1.7.7 // indirect + github.com/containerd/log v0.1.0 // indirect github.com/couchbase/gomemcached v0.1.3 // indirect github.com/couchbase/goutils v0.1.0 // indirect github.com/cpuguy83/dockercfg v0.3.1 // indirect @@ -385,7 +386,7 @@ require ( github.com/mitchellh/go-homedir v1.1.0 // indirect github.com/mitchellh/mapstructure v1.5.1-0.20220423185008-bf980b35cac4 // indirect github.com/mitchellh/reflectwalk v1.0.2 // indirect - github.com/moby/patternmatcher v0.5.0 // indirect + github.com/moby/patternmatcher v0.6.0 // indirect github.com/moby/sys/sequential v0.5.0 // indirect github.com/moby/term v0.5.0 // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect @@ -396,11 +397,11 @@ require ( github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect github.com/naoina/go-stringutil v0.1.0 // indirect github.com/nats-io/jwt/v2 v2.5.0 // indirect - github.com/nats-io/nkeys v0.4.5 // indirect + github.com/nats-io/nkeys v0.4.6 // indirect github.com/nats-io/nuid v1.0.1 // indirect github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatautil v0.84.0 // indirect github.com/opencontainers/go-digest v1.0.0 // indirect - github.com/opencontainers/image-spec v1.1.0-rc4 // indirect + github.com/opencontainers/image-spec v1.1.0-rc5 // indirect github.com/opencontainers/runc v1.1.5 // indirect github.com/opentracing-contrib/go-observer v0.0.0-20170622124052-a52f23424492 // indirect github.com/pborman/uuid v1.2.1 // indirect diff --git a/go.sum b/go.sum index 5f47afaac8545..c90f52717efb3 100644 --- a/go.sum +++ b/go.sum @@ -726,8 +726,8 @@ github.com/Mellanox/rdmamap v1.1.0/go.mod h1:fN+/V9lf10ABnDCwTaXRjeeWijLt2iVLETn github.com/Microsoft/go-winio v0.4.15/go.mod h1:tTuCMEN+UleMWgg9dVx4Hu52b1bJo+59jBh3ajtinzw= github.com/Microsoft/go-winio v0.6.1 h1:9/kr64B9VUZrLm5YYwbGtUJnMgqWVOdUAXu6Migciow= github.com/Microsoft/go-winio v0.6.1/go.mod h1:LRdKpFKfdobln8UmuiYcKPot9D2v6svN5+sAH+4kjUM= -github.com/Microsoft/hcsshim v0.11.0 h1:7EFNIY4igHEXUdj1zXgAyU3fLc7QfOKHbkldRVTBdiM= -github.com/Microsoft/hcsshim v0.11.0/go.mod h1:OEthFdQv/AD2RAdzR6Mm1N1KPCztGKDurW1Z8b8VGMM= +github.com/Microsoft/hcsshim v0.11.1 h1:hJ3s7GbWlGK4YVV92sO88BQSyF4ZLVy7/awqOlPxFbA= +github.com/Microsoft/hcsshim v0.11.1/go.mod h1:nFJmaO4Zr5Y7eADdFOpYswDDlNVbvcIJJNJLECr5JQg= github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= github.com/PaesslerAG/gval v1.2.2 h1:Y7iBzhgE09IGTt5QgGQ2IdaYYYOU134YGHBThD+wm9E= github.com/PaesslerAG/gval v1.2.2/go.mod h1:XRFLwvmkTEdYziLdaCeCa5ImcGVrfQbeNUbVR+C6xac= @@ -1003,8 +1003,10 @@ github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMe github.com/compose-spec/compose-go v1.20.0 h1:h4ZKOst1EF/DwZp7dWkb+wbTVE4nEyT9Lc89to84Ol4= github.com/compose-spec/compose-go v1.20.0/go.mod h1:+MdqXV4RA7wdFsahh/Kb8U0pAJqkg7mr4PM9tFKU8RM= github.com/containerd/console v1.0.3/go.mod h1:7LqA/THxQ86k76b8c/EMSiaJ3h1eZkMkXar0TQ1gf3U= -github.com/containerd/containerd v1.7.6 h1:oNAVsnhPoy4BTPQivLgTzI9Oleml9l/+eYIDYXRCYo8= -github.com/containerd/containerd v1.7.6/go.mod h1:SY6lrkkuJT40BVNO37tlYTSnKJnP5AXBc0fhx0q+TJ4= +github.com/containerd/containerd v1.7.7 h1:QOC2K4A42RQpcrZyptP6z9EJZnlHfHJUfZrAAHe15q4= +github.com/containerd/containerd v1.7.7/go.mod 
h1:3c4XZv6VeT9qgf9GMTxNTMFxGJrGpI2vz1yk4ye+YY8= +github.com/containerd/log v0.1.0 h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I= +github.com/containerd/log v0.1.0/go.mod h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3EhrzVo= github.com/coocood/freecache v1.2.3 h1:lcBwpZrwBZRZyLk/8EMyQVXRiFl663cCuMOrjCALeto= github.com/coocood/freecache v1.2.3/go.mod h1:RBUWa/Cy+OHdfTGFEhEuE1pMCMX51Ncizj7rthiQ3vk= github.com/coreos/bbolt v1.3.2/go.mod h1:iRUV2dpdMOn7Bo10OQBFzIJO9kkE559Wcmn+qkEiiKk= @@ -1816,8 +1818,8 @@ github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx github.com/moby/ipvs v1.1.0 h1:ONN4pGaZQgAx+1Scz5RvWV4Q7Gb+mvfRh3NsPS+1XQQ= github.com/moby/ipvs v1.1.0/go.mod h1:4VJMWuf098bsUMmZEiD4Tjk/O7mOn3l1PTD3s4OoYAs= github.com/moby/moby v24.0.6+incompatible/go.mod h1:fDXVQ6+S340veQPv35CzDahGBmHsiclFwfEygB/TWMc= -github.com/moby/patternmatcher v0.5.0 h1:YCZgJOeULcxLw1Q+sVR636pmS7sPEn1Qo2iAN6M7DBo= -github.com/moby/patternmatcher v0.5.0/go.mod h1:hDPoyOpDY7OrrMDLaYoY3hf52gNCR/YOUYxkhApJIxc= +github.com/moby/patternmatcher v0.6.0 h1:GmP9lR19aU5GqSSFko+5pRqHi+Ohk1O69aFiKkVGiPk= +github.com/moby/patternmatcher v0.6.0/go.mod h1:hDPoyOpDY7OrrMDLaYoY3hf52gNCR/YOUYxkhApJIxc= github.com/moby/sys/mountinfo v0.5.0/go.mod h1:3bMD3Rg+zkqx8MRYPi7Pyb0Ie97QEBmdxbhnCLlSvSU= github.com/moby/sys/sequential v0.5.0 h1:OPvI35Lzn9K04PBbCLW0g4LcFAJgHsvXsRyewg5lXtc= github.com/moby/sys/sequential v0.5.0/go.mod h1:tH2cOOs5V9MlPiXcQzRC+eEyab644PWKGRYaaV5ZZlo= @@ -1852,8 +1854,8 @@ github.com/nats-io/nats-server/v2 v2.9.23 h1:6Wj6H6QpP9FMlpCyWUaNu2yeZ/qGj+mdRkZ github.com/nats-io/nats-server/v2 v2.9.23/go.mod h1:wEjrEy9vnqIGE4Pqz4/c75v9Pmaq7My2IgFmnykc4C0= github.com/nats-io/nats.go v1.31.0 h1:/WFBHEc/dOKBF6qf1TZhrdEfTmOZ5JzdJ+Y3m6Y/p7E= github.com/nats-io/nats.go v1.31.0/go.mod h1:di3Bm5MLsoB4Bx61CBTsxuarI36WbhAwOm8QrW39+i8= -github.com/nats-io/nkeys v0.4.5 h1:Zdz2BUlFm4fJlierwvGK+yl20IAKUm7eV6AAZXEhkPk= -github.com/nats-io/nkeys v0.4.5/go.mod h1:XUkxdLPTufzlihbamfzQ7mw/VGx6ObUs+0bN5sNvt64= +github.com/nats-io/nkeys v0.4.6 h1:IzVe95ru2CT6ta874rt9saQRkWfe2nFj1NtvYSLqMzY= +github.com/nats-io/nkeys v0.4.6/go.mod h1:4DxZNzenSVd1cYQoAa8948QY3QDjrHfcfVADymtkpts= github.com/nats-io/nuid v1.0.1 h1:5iA8DT8V7q8WK2EScv2padNa/rTESc1KdnPw4TC2paw= github.com/nats-io/nuid v1.0.1/go.mod h1:19wcPz3Ph3q0Jbyiqsd0kePYG7A95tJPxeL+1OSON2c= github.com/netsampler/goflow2 v1.3.6 h1:fZbHDcWPcG+nkg2wGHCv4VJ9MrG8iA16YmuYhrSAEdQ= @@ -1911,8 +1913,8 @@ github.com/openconfig/ygot v0.20.0/go.mod h1:7ZiBFNc4n/1Hkv2v2dAEpxisqDznp0JVpLR github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= github.com/opencontainers/image-spec v1.0.2/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0= -github.com/opencontainers/image-spec v1.1.0-rc4 h1:oOxKUJWnFC4YGHCCMNql1x4YaDfYBTS5Y4x/Cgeo1E0= -github.com/opencontainers/image-spec v1.1.0-rc4/go.mod h1:X4pATf0uXsnn3g5aiGIsVnJBR4mxhKzfwmvK/B2NTm8= +github.com/opencontainers/image-spec v1.1.0-rc5 h1:Ygwkfw9bpDvs+c9E34SdgGOj41dX/cbdlwvlWt0pnFI= +github.com/opencontainers/image-spec v1.1.0-rc5/go.mod h1:X4pATf0uXsnn3g5aiGIsVnJBR4mxhKzfwmvK/B2NTm8= github.com/opencontainers/runc v1.1.5 h1:L44KXEpKmfWDcS02aeGm8QNTFXTo2D+8MYGDIJ/GDEs= github.com/opencontainers/runc v1.1.5/go.mod h1:1J5XiS+vdZ3wCyZybsuxXZWGrgSr8fFJHLXuG2PsnNg= github.com/opencontainers/runtime-spec v1.0.3-0.20210326190908-1c3f411f0417/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= @@ -2067,8 +2069,8 
@@ github.com/seccomp/libseccomp-golang v0.9.2-0.20220502022130-f33da4d89646/go.mod github.com/sensu/sensu-go/api/core/v2 v2.16.0 h1:HOq4rFkQ1S5ZjxmMTLc5J5mAbECrnKWvtXXbMqr3j9s= github.com/sensu/sensu-go/api/core/v2 v2.16.0/go.mod h1:MjM7+MCGEyTAgaZ589SiGHwYiaYF7N/58dU0J070u/0= github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= -github.com/shirou/gopsutil/v3 v3.23.8 h1:xnATPiybo6GgdRoC4YoGnxXZFRc3dqQTGi73oLvvBrE= -github.com/shirou/gopsutil/v3 v3.23.8/go.mod h1:7hmCaBn+2ZwaZOr6jmPBZDfawwMGuo1id3C6aM8EDqQ= +github.com/shirou/gopsutil/v3 v3.23.9 h1:ZI5bWVeu2ep4/DIxB4U9okeYJ7zp/QLTO4auRb/ty/E= +github.com/shirou/gopsutil/v3 v3.23.9/go.mod h1:x/NWSb71eMcjFIO0vhyGW5nZ7oSIgVjrCnADckb85GA= github.com/shoenig/go-m1cpu v0.1.6 h1:nxdKQNcEB6vzgA2E2bvzKIYRuNj7XNJ4S/aRSwKzFtM= github.com/shoenig/go-m1cpu v0.1.6/go.mod h1:1JJMcUBvfNwpq05QDQVAnx3gUHr9IYF7GNg9SUEw2VQ= github.com/shoenig/test v0.6.4 h1:kVTaSd7WLz5WZ2IaoM0RSzRsUD+m8wRR+5qvntpn4LU= @@ -2160,8 +2162,8 @@ github.com/syndtr/gocapability v0.0.0-20200815063812-42c35b437635/go.mod h1:hkRG github.com/tbrandon/mbserver v0.0.0-20170611213546-993e1772cc62 h1:Oj2e7Sae4XrOsk3ij21QjjEgAcVSeo9nkp0dI//cD2o= github.com/tbrandon/mbserver v0.0.0-20170611213546-993e1772cc62/go.mod h1:qUzPVlSj2UgxJkVbH0ZwuuiR46U8RBMDT5KLY78Ifpw= github.com/tedsuo/ifrit v0.0.0-20180802180643-bea94bb476cc/go.mod h1:eyZnKCc955uh98WQvzOm0dgAeLnf2O0Rz0LPoC5ze+0= -github.com/testcontainers/testcontainers-go v0.25.0 h1:erH6cQjsaJrH+rJDU9qIf89KFdhK0Bft0aEZHlYC3Vs= -github.com/testcontainers/testcontainers-go v0.25.0/go.mod h1:4sC9SiJyzD1XFi59q8umTQYWxnkweEc5OjVtTUlJzqQ= +github.com/testcontainers/testcontainers-go v0.26.0 h1:uqcYdoOHBy1ca7gKODfBd9uTHVK3a7UL848z09MVZ0c= +github.com/testcontainers/testcontainers-go v0.26.0/go.mod h1:ICriE9bLX5CLxL9OFQ2N+2N+f+803LNJ1utJb1+Inx0= github.com/thomasklein94/packer-plugin-libvirt v0.5.0 h1:aj2HLHZZM/ClGLIwVp9rrgh+2TOU/w4EiaZHAwCpOgs= github.com/thomasklein94/packer-plugin-libvirt v0.5.0/go.mod h1:GwN82FQ6KxCNKtS8LNUgLbwTZs90GGhBzCmTNkrTCrY= github.com/tidwall/gjson v1.14.4 h1:uo0p8EbA09J7RQaflQ1aBRffTR7xedD2bcIVSYxLnkM= @@ -2693,6 +2695,7 @@ golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.13.0 h1:Af8nKPmuFypiUBjVoU9V20FiaFXOcuZI21p0ycVYYGE= golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= diff --git a/plugins/common/opcua/input/input_client_test.go b/plugins/common/opcua/input/input_client_test.go index dac2ab5e7ee88..a55735f616270 100644 --- a/plugins/common/opcua/input/input_client_test.go +++ b/plugins/common/opcua/input/input_client_test.go @@ -7,20 +7,21 @@ import ( "time" "github.com/gopcua/opcua/ua" + "github.com/stretchr/testify/require" + "github.com/influxdata/telegraf" "github.com/influxdata/telegraf/config" "github.com/influxdata/telegraf/metric" "github.com/influxdata/telegraf/plugins/common/opcua" "github.com/influxdata/telegraf/testutil" - "github.com/stretchr/testify/require" ) func TestTagsSliceToMap(t *testing.T) { m, err := tagsSliceToMap([][]string{{"foo", "bar"}, {"baz", "bat"}}) 
require.NoError(t, err) require.Len(t, m, 2) - require.Equal(t, m["foo"], "bar") - require.Equal(t, m["baz"], "bat") + require.Equal(t, "bar", m["foo"]) + require.Equal(t, "bat", m["baz"]) } func TestTagsSliceToMap_twoStrings(t *testing.T) { @@ -316,7 +317,7 @@ func TestNewNodeMetricMappingIdStrInstantiated(t *testing.T) { TagsSlice: [][]string{}, }, map[string]string{}) require.NoError(t, err) - require.Equal(t, nmm.idStr, "ns=2;s=h") + require.Equal(t, "ns=2;s=h", nmm.idStr) } func TestValidateNodeToAdd(t *testing.T) { diff --git a/plugins/inputs/beat/beat_test.go b/plugins/inputs/beat/beat_test.go index 2ca2ffba0a149..fae8818f49f3d 100644 --- a/plugins/inputs/beat/beat_test.go +++ b/plugins/inputs/beat/beat_test.go @@ -9,8 +9,9 @@ import ( "os" "testing" - "github.com/influxdata/telegraf/testutil" "github.com/stretchr/testify/require" + + "github.com/influxdata/telegraf/testutil" ) func Test_BeatStats(t *testing.T) { @@ -177,10 +178,10 @@ func Test_BeatRequest(t *testing.T) { data, err := os.ReadFile(jsonFilePath) require.NoErrorf(t, err, "could not read from data file %s", jsonFilePath) - require.Equal(t, request.Host, "beat.test.local") - require.Equal(t, request.Method, "POST") - require.Equal(t, request.Header.Get("Authorization"), "Basic YWRtaW46UFdE") - require.Equal(t, request.Header.Get("X-Test"), "test-value") + require.Equal(t, "beat.test.local", request.Host) + require.Equal(t, "POST", request.Method) + require.Equal(t, "Basic YWRtaW46UFdE", request.Header.Get("Authorization")) + require.Equal(t, "test-value", request.Header.Get("X-Test")) _, err = w.Write(data) require.NoError(t, err, "could not write data") diff --git a/plugins/inputs/ceph/ceph_test.go b/plugins/inputs/ceph/ceph_test.go index 340f5e58d38df..a782319ea0944 100644 --- a/plugins/inputs/ceph/ceph_test.go +++ b/plugins/inputs/ceph/ceph_test.go @@ -25,7 +25,7 @@ type expectedResult struct { func TestParseSockId(t *testing.T) { s := parseSockID(sockFile(osdPrefix, 1), osdPrefix, sockSuffix) - require.Equal(t, s, "1") + require.Equal(t, "1", s) } func TestParseMonDump(t *testing.T) { diff --git a/plugins/inputs/cgroup/cgroup_linux.go b/plugins/inputs/cgroup/cgroup_linux.go index 2b73273e44966..780d356295466 100644 --- a/plugins/inputs/cgroup/cgroup_linux.go +++ b/plugins/inputs/cgroup/cgroup_linux.go @@ -109,6 +109,8 @@ func (g *CGroup) generateDirs(list chan<- pathInfo) { } func (g *CGroup) generateFiles(dir string, list chan<- pathInfo) { + dir = strings.Replace(dir, "\\", "\\\\", -1) + defer close(list) for _, file := range g.Files { // getting all file paths that match the pattern 'dir + file' diff --git a/plugins/inputs/cgroup/cgroup_test.go b/plugins/inputs/cgroup/cgroup_test.go index ead3bac6aaebd..d30cebd837a39 100644 --- a/plugins/inputs/cgroup/cgroup_test.go +++ b/plugins/inputs/cgroup/cgroup_test.go @@ -416,3 +416,29 @@ func TestCgroupStatistics_8(t *testing.T) { require.NoError(t, acc.GatherError(cg.Gather)) require.Len(t, cg.logged, 1) } + +func TestCgroupEscapeDir(t *testing.T) { + var acc testutil.Accumulator + var cg = &CGroup{ + Paths: []string{"testdata/backslash/machine-qemu\x2d1\x2d*"}, + Files: []string{"cpu.stat"}, + logged: make(map[string]bool), + } + + expected := []telegraf.Metric{ + metric.New( + "cgroup", + map[string]string{"path": `testdata/backslash/machine-qemu-1-ubuntu`}, + map[string]interface{}{ + "cpu.stat.core_sched.force_idle_usec": int64(0), + "cpu.stat.system_usec": int64(103537582650), + "cpu.stat.usage_usec": int64(614953149468), + "cpu.stat.user_usec": int64(511415566817), 
+ }, + time.Unix(0, 0), + ), + } + + require.NoError(t, acc.GatherError(cg.Gather)) + testutil.RequireMetricsEqual(t, expected, acc.GetTelegrafMetrics(), testutil.IgnoreTime()) +} diff --git a/plugins/inputs/cgroup/testdata/backslash/machine-qemu-1-ubuntu/cpu.stat b/plugins/inputs/cgroup/testdata/backslash/machine-qemu-1-ubuntu/cpu.stat new file mode 100644 index 0000000000000..de2e627c1ac22 --- /dev/null +++ b/plugins/inputs/cgroup/testdata/backslash/machine-qemu-1-ubuntu/cpu.stat @@ -0,0 +1,4 @@ +usage_usec 614953149468 +user_usec 511415566817 +system_usec 103537582650 +core_sched.force_idle_usec 0 diff --git a/plugins/inputs/cisco_telemetry_mdt/cisco_telemetry_mdt_test.go b/plugins/inputs/cisco_telemetry_mdt/cisco_telemetry_mdt_test.go index b0ff020d9100d..3d6ab0b634b90 100644 --- a/plugins/inputs/cisco_telemetry_mdt/cisco_telemetry_mdt_test.go +++ b/plugins/inputs/cisco_telemetry_mdt/cisco_telemetry_mdt_test.go @@ -1005,7 +1005,7 @@ func TestTCPDialoutMultiple(t *testing.T) { require.NoError(t, conn.Close()) // We use the invalid dialout flags to let the server close the connection - require.Equal(t, acc.Errors, []error{errors.New("invalid dialout flags: 257"), errors.New("invalid dialout flags: 257")}) + require.Equal(t, []error{errors.New("invalid dialout flags: 257"), errors.New("invalid dialout flags: 257")}, acc.Errors) tags := map[string]string{ "path": "type:model/some/path", @@ -1060,7 +1060,7 @@ func TestGRPCDialoutError(t *testing.T) { require.True(t, err == nil || errors.Is(err, io.EOF)) c.Stop() - require.Equal(t, acc.Errors, []error{errors.New("GRPC dialout error: foobar")}) + require.Equal(t, []error{errors.New("GRPC dialout error: foobar")}, acc.Errors) } func TestGRPCDialoutMultiple(t *testing.T) { @@ -1119,7 +1119,7 @@ func TestGRPCDialoutMultiple(t *testing.T) { c.Stop() require.NoError(t, conn.Close()) - require.Equal(t, acc.Errors, []error{errors.New("GRPC dialout error: testclose"), errors.New("GRPC dialout error: testclose")}) + require.Equal(t, []error{errors.New("GRPC dialout error: testclose"), errors.New("GRPC dialout error: testclose")}, acc.Errors) tags := map[string]string{ "path": "type:model/some/path", diff --git a/plugins/inputs/cloud_pubsub/cloud_pubsub_test.go b/plugins/inputs/cloud_pubsub/cloud_pubsub_test.go index 6da36a2c5f2f1..fb15b6ec47d5a 100644 --- a/plugins/inputs/cloud_pubsub/cloud_pubsub_test.go +++ b/plugins/inputs/cloud_pubsub/cloud_pubsub_test.go @@ -56,7 +56,7 @@ func TestRunParse(t *testing.T) { sub.messages <- msg acc.Wait(1) - require.Equal(t, acc.NFields(), 1) + require.Equal(t, 1, acc.NFields()) metric := acc.Metrics[0] validateTestInfluxMetric(t, metric) } @@ -102,7 +102,7 @@ func TestRunBase64(t *testing.T) { sub.messages <- msg acc.Wait(1) - require.Equal(t, acc.NFields(), 1) + require.Equal(t, 1, acc.NFields()) metric := acc.Metrics[0] validateTestInfluxMetric(t, metric) } @@ -151,7 +151,7 @@ func TestRunGzipDecode(t *testing.T) { } sub.messages <- msg acc.Wait(1) - require.Equal(t, acc.NFields(), 1) + require.Equal(t, 1, acc.NFields()) metric := acc.Metrics[0] validateTestInfluxMetric(t, metric) } @@ -200,7 +200,7 @@ func TestRunInvalidMessages(t *testing.T) { // Make sure we acknowledged message so we don't receive it again. testTracker.WaitForAck(1) - require.Equal(t, acc.NFields(), 0) + require.Equal(t, 0, acc.NFields()) } func TestRunOverlongMessages(t *testing.T) { @@ -249,7 +249,7 @@ func TestRunOverlongMessages(t *testing.T) { // Make sure we acknowledged message so we don't receive it again. 
testTracker.WaitForAck(1) - require.Equal(t, acc.NFields(), 0) + require.Equal(t, 0, acc.NFields()) } func TestRunErrorInSubscriber(t *testing.T) { diff --git a/plugins/inputs/exec/exec_test.go b/plugins/inputs/exec/exec_test.go index 3ac20f0feac05..edf8ef5583482 100644 --- a/plugins/inputs/exec/exec_test.go +++ b/plugins/inputs/exec/exec_test.go @@ -98,7 +98,7 @@ func TestExec(t *testing.T) { var acc testutil.Accumulator err := acc.GatherError(e.Gather) require.NoError(t, err) - require.Equal(t, acc.NFields(), 8, "non-numeric measurements should be ignored") + require.Equal(t, 8, acc.NFields(), "non-numeric measurements should be ignored") fields := map[string]interface{}{ "num_processes": float64(82), @@ -125,7 +125,7 @@ func TestExecMalformed(t *testing.T) { var acc testutil.Accumulator require.Error(t, acc.GatherError(e.Gather)) - require.Equal(t, acc.NFields(), 0, "No new points should have been added") + require.Equal(t, 0, acc.NFields(), "No new points should have been added") } func TestCommandError(t *testing.T) { @@ -140,7 +140,7 @@ func TestCommandError(t *testing.T) { var acc testutil.Accumulator require.Error(t, acc.GatherError(e.Gather)) - require.Equal(t, acc.NFields(), 0, "No new points should have been added") + require.Equal(t, 0, acc.NFields(), "No new points should have been added") } func TestExecCommandWithGlob(t *testing.T) { diff --git a/plugins/inputs/gnmi/README.md b/plugins/inputs/gnmi/README.md index dcb6cabe717c3..6802f1d11d128 100644 --- a/plugins/inputs/gnmi/README.md +++ b/plugins/inputs/gnmi/README.md @@ -61,6 +61,10 @@ See the [CONFIGURATION.md][CONFIGURATION.md] for more details. ## Remove leading slashes and dots in field-name # trim_field_names = false + ## Guess the path-tag if an update does not contain a prefix-path + ## If enabled, the common-path of all elements in the update is used. + # guess_path_tag = false + ## enable client-side TLS and define CA to authenticate the device # enable_tls = false # tls_ca = "/etc/telegraf/ca.pem" diff --git a/plugins/inputs/gnmi/gnmi.go b/plugins/inputs/gnmi/gnmi.go index 52f4d600b904d..f46530494879b 100644 --- a/plugins/inputs/gnmi/gnmi.go +++ b/plugins/inputs/gnmi/gnmi.go @@ -5,8 +5,6 @@ import ( "context" _ "embed" "fmt" - "path" - "regexp" "strings" "sync" "time" @@ -25,9 +23,6 @@ import ( //go:embed sample.conf var sampleConfig string -// Regular expression to see if a path element contains an origin -var originPattern = regexp.MustCompile(`^([\w-_]+):`) - // Define the warning to show if we cannot get a metric name. const emptyNameWarning = `Got empty metric-name for response, usually indicating configuration issues as the response cannot be related to any subscription. 
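The gnmi README/sample.conf addition above documents the new `guess_path_tag` option: when a device sends updates without a prefix-path, the plugin falls back to the common path shared by all fields in the update and uses that as the `path` tag. As a rough, standalone sketch of the idea (assuming plain slash-separated paths; this is not the plugin's actual `guessPrefixFromUpdate`/`keepCommonPart` code, which appears later in this diff):

```go
package main

import (
	"fmt"
	"strings"
)

// commonPathPrefix returns the leading path segments shared by all given
// slash-separated paths, i.e. a guess for the missing prefix-path.
func commonPathPrefix(paths []string) string {
	if len(paths) == 0 {
		return ""
	}
	common := strings.Split(strings.Trim(paths[0], "/"), "/")
	for _, p := range paths[1:] {
		segments := strings.Split(strings.Trim(p, "/"), "/")
		// Keep only the leading segments that still match.
		n := 0
		for n < len(common) && n < len(segments) && common[n] == segments[n] {
			n++
		}
		common = common[:n]
	}
	if len(common) == 0 {
		return ""
	}
	return "/" + strings.Join(common, "/")
}

func main() {
	updates := []string{
		"/interfaces/interface/state/counters/in-octets",
		"/interfaces/interface/state/counters/out-octets",
	}
	// Prints "/interfaces/interface/state/counters"
	fmt.Println(commonPathPrefix(updates))
}
```

The plugin's version works on parsed `pathInfo` segments rather than raw strings, but the longest-common-prefix idea is the same.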
@@ -58,12 +53,13 @@ type GNMI struct { Trace bool `toml:"dump_responses"` CanonicalFieldNames bool `toml:"canonical_field_names"` TrimFieldNames bool `toml:"trim_field_names"` + GuessPathTag bool `toml:"guess_path_tag"` EnableTLS bool `toml:"enable_tls" deprecated:"1.27.0;use 'tls_enable' instead"` Log telegraf.Logger `toml:"-"` internaltls.ClientConfig // Internal state - internalAliases map[string]string + internalAliases map[*pathInfo]string cancel context.CancelFunc wg sync.WaitGroup } @@ -169,7 +165,7 @@ func (c *GNMI) Init() error { } // Invert explicit alias list and prefill subscription names - c.internalAliases = make(map[string]string, len(c.Subscriptions)+len(c.Aliases)+len(c.TagSubscriptions)) + c.internalAliases = make(map[*pathInfo]string, len(c.Subscriptions)+len(c.Aliases)+len(c.TagSubscriptions)) for _, s := range c.Subscriptions { if err := s.buildAlias(c.internalAliases); err != nil { return err @@ -181,7 +177,7 @@ func (c *GNMI) Init() error { } } for alias, encodingPath := range c.Aliases { - c.internalAliases[encodingPath] = alias + c.internalAliases[newInfoFromString(encodingPath)] = alias } c.Log.Debugf("Internal alias mapping: %+v", c.internalAliases) @@ -224,6 +220,7 @@ func (c *GNMI) Start(acc telegraf.Accumulator) error { trace: c.Trace, canonicalFieldNames: c.CanonicalFieldNames, trimSlash: c.TrimFieldNames, + guessPathTag: c.GuessPathTag, log: c.Log, } for ctx.Err() == nil { @@ -362,26 +359,21 @@ func (s *Subscription) buildFullPath(c *GNMI) error { return nil } -func (s *Subscription) buildAlias(aliases map[string]string) error { +func (s *Subscription) buildAlias(aliases map[*pathInfo]string) error { // Build the subscription path without keys - gnmiPath, err := parsePath(s.Origin, s.Path, "") + path, err := parsePath(s.Origin, s.Path, "") if err != nil { return err } - - origin, spath, _, err := handlePath(gnmiPath, nil, nil, "") - if err != nil { - return fmt.Errorf("handling path failed: %w", err) - } + info := newInfoFromPathWithoutKeys(path) // If the user didn't provide a measurement name, use last path element name := s.Name - if name == "" { - name = path.Base(spath) + if name == "" && len(info.segments) > 0 { + name = info.segments[len(info.segments)-1] } if name != "" { - aliases[origin+spath] = name - aliases[spath] = name + aliases[info] = name } return nil } diff --git a/plugins/inputs/gnmi/handler.go b/plugins/inputs/gnmi/handler.go index 861c87bf00238..81e8f04f48b51 100644 --- a/plugins/inputs/gnmi/handler.go +++ b/plugins/inputs/gnmi/handler.go @@ -8,6 +8,7 @@ import ( "io" "net" "path" + "sort" "strconv" "strings" "time" @@ -31,7 +32,7 @@ const eidJuniperTelemetryHeader = 1 type handler struct { address string - aliases map[string]string + aliases map[*pathInfo]string tagsubs []TagSubscription maxMsgSize int emptyNameWarnShown bool @@ -40,6 +41,7 @@ type handler struct { trace bool canonicalFieldNames bool trimSlash bool + guessPathTag bool log telegraf.Logger } @@ -117,74 +119,70 @@ func (h *handler) subscribeGNMI(ctx context.Context, acc telegraf.Accumulator, t // Handle SubscribeResponse_Update message from gNMI and parse contained telemetry data func (h *handler) handleSubscribeResponseUpdate(acc telegraf.Accumulator, response *gnmiLib.SubscribeResponse_Update, extension []*gnmiExt.Extension) { - var prefix, prefixAliasPath string grouper := metric.NewSeriesGrouper() timestamp := time.Unix(0, response.Update.Timestamp) - prefixTags := make(map[string]string) - // iter on each extension + // Extract tags from potential extension in the 
update notification + headerTags := make(map[string]string) for _, ext := range extension { currentExt := ext.GetRegisteredExt().Msg if currentExt == nil { break } - // extension ID + switch ext.GetRegisteredExt().Id { - // Juniper Header extention - //EID_JUNIPER_TELEMETRY_HEADER = 1; case eidJuniperTelemetryHeader: + // Juniper Header extention // Decode it only if user requested it if choice.Contains("juniper_header", h.vendorExt) { juniperHeader := &jnprHeader.GnmiJuniperTelemetryHeaderExtension{} - // unmarshal extention - err := proto.Unmarshal(currentExt, juniperHeader) - if err != nil { + if err := proto.Unmarshal(currentExt, juniperHeader); err != nil { h.log.Errorf("unmarshal gnmi Juniper Header extension failed: %v", err) - break + } else { + // Add only relevant Tags from the Juniper Header extension. + // These are required for aggregation + headerTags["component_id"] = strconv.FormatUint(uint64(juniperHeader.GetComponentId()), 10) + headerTags["component"] = juniperHeader.GetComponent() + headerTags["sub_component_id"] = strconv.FormatUint(uint64(juniperHeader.GetSubComponentId()), 10) } - // Add only relevant Tags from the Juniper Header extension. - // These are required for aggregation - prefixTags["component_id"] = strconv.FormatUint(uint64(juniperHeader.GetComponentId()), 10) - prefixTags["component"] = juniperHeader.GetComponent() - prefixTags["sub_component_id"] = strconv.FormatUint(uint64(juniperHeader.GetSubComponentId()), 10) } - default: continue } } - if response.Update.Prefix != nil { - var origin string - var err error - if origin, prefix, prefixAliasPath, err = handlePath(response.Update.Prefix, prefixTags, h.aliases, ""); err != nil { - h.log.Errorf("Handling path %q failed: %v", response.Update.Prefix, err) - } - prefix = origin + prefix - } + // Extract the path part valid for the whole set of updates if any + prefix := newInfoFromPath(response.Update.Prefix) - prefixTags["source"], _, _ = net.SplitHostPort(h.address) - if prefix != "" { - prefixTags["path"] = prefix + // Add info to the tags + headerTags["source"], _, _ = net.SplitHostPort(h.address) + if !prefix.empty() { + headerTags["path"] = prefix.String() } - // Process and remove tag-updates from the response first so we will + // Process and remove tag-updates from the response first so we can // add all available tags to the metrics later. 
- var valueUpdates []*gnmiLib.Update + var valueFields []updateField for _, update := range response.Update.Update { - fullPath := pathWithPrefix(response.Update.Prefix, update.Path) + fullPath := prefix.append(update.Path) + fields, err := newFieldsFromUpdate(fullPath, update) + if err != nil { + h.log.Errorf("Processing update %v failed: %v", update, err) + } // Prepare tags from prefix - tags := make(map[string]string, len(prefixTags)) - for key, val := range prefixTags { + tags := make(map[string]string, len(headerTags)) + for key, val := range headerTags { + tags[key] = val + } + for key, val := range fullPath.Tags() { tags[key] = val } - _, fields := h.handleTelemetryField(update, tags, prefix) - + // TODO: Handle each field individually to allow in-JSON tags var tagUpdate bool for _, tagSub := range h.tagsubs { - if !equalPathNoKeys(fullPath, tagSub.fullPath) { + if !fullPath.equalsPathNoKeys(tagSub.fullPath) { continue } h.log.Debugf("Tag-subscription update for %q: %+v", tagSub.Name, update) @@ -195,77 +193,71 @@ func (h *handler) handleSubscribeResponseUpdate(acc telegraf.Accumulator, respon break } if !tagUpdate { - valueUpdates = append(valueUpdates, update) + valueFields = append(valueFields, fields...) } } - // Parse individual Update message and create measurements - var name, lastAliasPath string - for _, update := range valueUpdates { - fullPath := pathWithPrefix(response.Update.Prefix, update.Path) + // Some devices do not provide a prefix, so do some guesswork based + // on the paths of the fields + if headerTags["path"] == "" && h.guessPathTag { + if prefixPath := guessPrefixFromUpdate(valueFields); prefixPath != "" { + headerTags["path"] = prefixPath + } + } + // Parse individual update message and create measurements + for _, field := range valueFields { // Prepare tags from prefix - tags := make(map[string]string, len(prefixTags)) - for key, val := range prefixTags { + fieldTags := field.path.Tags() + tags := make(map[string]string, len(headerTags)+len(fieldTags)) + for key, val := range headerTags { + tags[key] = val + } + for key, val := range fieldTags { tags[key] = val } - - aliasPath, fields := h.handleTelemetryField(update, tags, prefix) // Add the tags derived via tag-subscriptions - for k, v := range h.tagStore.lookup(fullPath, tags) { + for k, v := range h.tagStore.lookup(field.path, tags) { tags[k] = v } - // Inherent valid alias from prefix parsing - if len(prefixAliasPath) > 0 && len(aliasPath) == 0 { - aliasPath = prefixAliasPath - } - - // Lookup alias if alias-path has changed - if aliasPath != lastAliasPath { - name = prefix - if alias, ok := h.aliases[aliasPath]; ok { - name = alias - } else { - h.log.Debugf("No measurement alias for gNMI path: %s", name) + // Lookup alias for the metric + aliasPath, name := h.lookupAlias(field.path) + if name == "" { + h.log.Debugf("No measurement alias for gNMI path: %s", field.path) + if !h.emptyNameWarnShown { + h.log.Warnf(emptyNameWarning, response.Update) + h.emptyNameWarnShown = true } - lastAliasPath = aliasPath - } - - // Check for empty names - if name == "" && !h.emptyNameWarnShown { - h.log.Warnf(emptyNameWarning, response.Update) - h.emptyNameWarnShown = true } // Group metrics - for k, v := range fields { - key := k - if h.canonicalFieldNames { - // Strip the origin is any for the field names - if parts := strings.SplitN(key, ":", 2); len(parts) == 2 { - key = parts[1] - } - } else { - if len(aliasPath) < len(key) && len(aliasPath) != 0 { - // This may not be an exact prefix, due to naming style - // 
conversion on the key. - key = key[len(aliasPath)+1:] - } else if len(aliasPath) >= len(key) { - // Otherwise use the last path element as the field key. - key = path.Base(key) - } + fieldPath := field.path.String() + key := strings.ReplaceAll(fieldPath, "-", "_") + if h.canonicalFieldNames { + // Strip the origin is any for the field names + if parts := strings.SplitN(key, ":", 2); len(parts) == 2 { + key = parts[1] } - if h.trimSlash { - key = strings.TrimLeft(key, "/.") + } else { + if len(aliasPath) < len(key) && len(aliasPath) != 0 { + // This may not be an exact prefix, due to naming style + // conversion on the key. + key = key[len(aliasPath)+1:] + } else if len(aliasPath) >= len(key) { + // Otherwise use the last path element as the field key. + key = path.Base(key) } - if key == "" { - h.log.Errorf("Invalid empty path: %q", k) - continue - } - grouper.Add(name, tags, timestamp, key, v) } + if h.trimSlash { + key = strings.TrimLeft(key, "/.") + } + if key == "" { + h.log.Errorf("Invalid empty path %q with alias %q", fieldPath, aliasPath) + continue + } + grouper.Add(name, tags, timestamp, key, field.value) } // Add grouped measurements @@ -274,15 +266,48 @@ func (h *handler) handleSubscribeResponseUpdate(acc telegraf.Accumulator, respon } } -// HandleTelemetryField and add it to a measurement -func (h *handler) handleTelemetryField(update *gnmiLib.Update, tags map[string]string, prefix string) (string, map[string]interface{}) { - _, gpath, aliasPath, err := handlePath(update.Path, tags, h.aliases, prefix) - if err != nil { - h.log.Errorf("Handling path %q failed: %v", update.Path, err) +// Try to find the alias for the given path +type aliasCandidate struct { + path, alias string +} + +func (h *handler) lookupAlias(info *pathInfo) (aliasPath, alias string) { + candidates := make([]aliasCandidate, 0) + for i, a := range h.aliases { + if !i.isSubPathOf(info) { + continue + } + candidates = append(candidates, aliasCandidate{i.String(), a}) } - fields, err := gnmiToFields(strings.Replace(gpath, "-", "_", -1), update.Val) - if err != nil { - h.log.Errorf("Error parsing update value %q: %v", update.Val, err) + if len(candidates) == 0 { + return "", "" + } + + // Reverse sort the candidates by path length so we can use the longest match + sort.SliceStable(candidates, func(i, j int) bool { + return len(candidates[i].path) > len(candidates[j].path) + }) + + return candidates[0].path, candidates[0].alias +} + +func guessPrefixFromUpdate(fields []updateField) string { + if len(fields) == 0 { + return "" + } + if len(fields) == 1 { + dir, _ := fields[0].path.split() + return dir + } + commonPath := &pathInfo{ + origin: fields[0].path.origin, + segments: append([]string{}, fields[0].path.segments...), + } + for _, f := range fields[1:] { + commonPath.keepCommonPart(f.path) + } + if commonPath.empty() { + return "" } - return aliasPath, fields + return commonPath.String() } diff --git a/plugins/inputs/gnmi/path.go b/plugins/inputs/gnmi/path.go new file mode 100644 index 0000000000000..e00af0dd63946 --- /dev/null +++ b/plugins/inputs/gnmi/path.go @@ -0,0 +1,291 @@ +package gnmi + +import ( + "regexp" + "strings" + + gnmiLib "github.com/openconfig/gnmi/proto/gnmi" +) + +// Regular expression to see if a path element contains an origin +var originPattern = regexp.MustCompile(`^([\w-_]+):`) + +type keySegment struct { + name string + path string + kv map[string]string +} + +type pathInfo struct { + origin string + target string + segments []string + keyValues []keySegment +} + +func 
newInfoFromString(path string) *pathInfo { + if path == "" { + return &pathInfo{} + } + + info := &pathInfo{} + for _, s := range strings.Split(path, "/") { + if s != "" { + info.segments = append(info.segments, s) + } + } + info.normalize() + + return info +} + +func newInfoFromPathWithoutKeys(path *gnmiLib.Path) *pathInfo { + info := &pathInfo{ + origin: path.Origin, + segments: make([]string, 0, len(path.Elem)), + } + for _, elem := range path.Elem { + if elem.Name == "" { + continue + } + info.segments = append(info.segments, elem.Name) + } + info.normalize() + + return info +} + +func newInfoFromPath(paths ...*gnmiLib.Path) *pathInfo { + if len(paths) == 0 { + return nil + } + + info := &pathInfo{} + if paths[0] != nil { + info.origin = paths[0].Origin + info.target = paths[0].Target + } + + for _, p := range paths { + if p == nil { + continue + } + for _, elem := range p.Elem { + if elem.Name == "" { + continue + } + info.segments = append(info.segments, elem.Name) + + if len(elem.Key) == 0 { + continue + } + keyInfo := keySegment{ + name: elem.Name, + path: info.String(), + kv: make(map[string]string, len(elem.Key)), + } + for k, v := range elem.Key { + keyInfo.kv[k] = v + } + info.keyValues = append(info.keyValues, keyInfo) + } + } + info.normalize() + + return info +} + +func (pi *pathInfo) empty() bool { + return len(pi.segments) == 0 +} + +func (pi *pathInfo) append(paths ...*gnmiLib.Path) *pathInfo { + // Copy the existing info + path := &pathInfo{ + origin: pi.origin, + target: pi.target, + segments: append([]string{}, pi.segments...), + keyValues: make([]keySegment, 0, len(pi.keyValues)), + } + for _, elem := range pi.keyValues { + keyInfo := keySegment{ + name: elem.name, + path: elem.path, + kv: make(map[string]string, len(elem.kv)), + } + for k, v := range elem.kv { + keyInfo.kv[k] = v + } + path.keyValues = append(path.keyValues, keyInfo) + } + + // Add the new segments + for _, p := range paths { + for _, elem := range p.Elem { + if elem.Name == "" { + continue + } + path.segments = append(path.segments, elem.Name) + + if len(elem.Key) == 0 { + continue + } + keyInfo := keySegment{ + name: elem.Name, + path: path.String(), + kv: make(map[string]string, len(elem.Key)), + } + for k, v := range elem.Key { + keyInfo.kv[k] = v + } + path.keyValues = append(path.keyValues, keyInfo) + } + } + + return path +} + +func (pi *pathInfo) appendSegments(segments ...string) *pathInfo { + // Copy the existing info + path := &pathInfo{ + origin: pi.origin, + target: pi.target, + segments: append([]string{}, pi.segments...), + keyValues: make([]keySegment, 0, len(pi.keyValues)), + } + for _, elem := range pi.keyValues { + keyInfo := keySegment{ + name: elem.name, + path: elem.path, + kv: make(map[string]string, len(elem.kv)), + } + for k, v := range elem.kv { + keyInfo.kv[k] = v + } + path.keyValues = append(path.keyValues, keyInfo) + } + + // Add the new segments + for _, s := range segments { + if s == "" { + continue + } + path.segments = append(path.segments, s) + } + + return path +} + +func (pi *pathInfo) normalize() { + if len(pi.segments) == 0 { + return + } + + // Some devices supply the origin as part of the first path element, + // so try to find and extract it there. 
+ groups := originPattern.FindStringSubmatch(pi.segments[0]) + if len(groups) == 2 { + pi.origin = groups[1] + pi.segments[0] = pi.segments[0][len(groups[1])+1:] + } +} + +func (pi *pathInfo) equalsPathNoKeys(path *gnmiLib.Path) bool { + if len(pi.segments) != len(path.Elem) { + return false + } + for i, s := range pi.segments { + if s != path.Elem[i].Name { + return false + } + } + return true +} + +func (pi *pathInfo) isSubPathOf(path *pathInfo) bool { + // If both set an origin it has to match. Otherwise we ignore the origin + if pi.origin != "" && path.origin != "" && pi.origin != path.origin { + return false + } + + // The "parent" path should have the same length or be shorter than the + // sub-path to have a chance to match + if len(pi.segments) > len(path.segments) { + return false + } + + // Compare the elements and exit if we find a mismatch + for i, p := range pi.segments { + if p != path.segments[i] { + return false + } + } + + return true +} + +func (pi *pathInfo) keepCommonPart(path *pathInfo) { + shortestLen := len(pi.segments) + if len(path.segments) < shortestLen { + shortestLen = len(path.segments) + } + + // Compare the elements and stop as soon as they do mismatch + var matchLen int + for i, p := range pi.segments[:shortestLen] { + if p != path.segments[i] { + break + } + matchLen = i + 1 + } + if matchLen < 1 { + pi.segments = nil + return + } + pi.segments = pi.segments[:matchLen] +} + +func (pi *pathInfo) split() (dir, base string) { + if len(pi.segments) == 0 { + return "", "" + } + if len(pi.segments) == 1 { + return "", pi.segments[0] + } + + dir = "/" + strings.Join(pi.segments[:len(pi.segments)-1], "/") + if pi.origin != "" { + dir = pi.origin + ":" + dir + } + return dir, pi.segments[len(pi.segments)-1] +} + +func (pi *pathInfo) String() string { + if len(pi.segments) == 0 { + return "" + } + + out := "/" + strings.Join(pi.segments, "/") + if pi.origin != "" { + out = pi.origin + ":" + out + } + return out +} + +func (pi *pathInfo) Tags() map[string]string { + tags := make(map[string]string, len(pi.keyValues)) + for _, s := range pi.keyValues { + for k, v := range s.kv { + key := strings.ReplaceAll(k, "-", "_") + + // Use short-form of key if possible + if _, exists := tags[key]; !exists { + tags[key] = v + continue + } + tags[s.path+"/"+key] = v + } + } + + return tags +} diff --git a/plugins/inputs/gnmi/sample.conf b/plugins/inputs/gnmi/sample.conf index 599ea0989dc12..7e330f79d6a35 100644 --- a/plugins/inputs/gnmi/sample.conf +++ b/plugins/inputs/gnmi/sample.conf @@ -22,6 +22,10 @@ ## Remove leading slashes and dots in field-name # trim_field_names = false + ## Guess the path-tag if an update does not contain a prefix-path + ## If enabled, the common-path of all elements in the update is used. 
+ # guess_path_tag = false + ## enable client-side TLS and define CA to authenticate the device # enable_tls = false # tls_ca = "/etc/telegraf/ca.pem" diff --git a/plugins/inputs/gnmi/tag_store.go b/plugins/inputs/gnmi/tag_store.go index a1b2a663cfad9..3576697343003 100644 --- a/plugins/inputs/gnmi/tag_store.go +++ b/plugins/inputs/gnmi/tag_store.go @@ -2,12 +2,10 @@ package gnmi import ( "fmt" - "path/filepath" "sort" "strings" "github.com/influxdata/telegraf/internal" - gnmiLib "github.com/openconfig/gnmi/proto/gnmi" ) type tagStore struct { @@ -40,14 +38,19 @@ func newTagStore(subs []TagSubscription) *tagStore { } // Store tags extracted from TagSubscriptions -func (s *tagStore) insert(subscription TagSubscription, path *gnmiLib.Path, values map[string]interface{}, tags map[string]string) error { +func (s *tagStore) insert(subscription TagSubscription, path *pathInfo, values []updateField, tags map[string]string) error { switch subscription.Match { case "unconditional": - for k, v := range values { - tagName := subscription.Name + "/" + filepath.Base(k) - sv, err := internal.ToString(v) + for _, f := range values { + tagName := subscription.Name + if len(f.path.segments) > 0 { + key := f.path.segments[len(f.path.segments)-1] + key = strings.ReplaceAll(key, "-", "_") + tagName += "/" + key + } + sv, err := internal.ToString(f.value) if err != nil { - return fmt.Errorf("conversion error for %v: %w", v, err) + return fmt.Errorf("conversion error for %v: %w", f.value, err) } if sv == "" { delete(s.unconditional, tagName) @@ -68,11 +71,16 @@ func (s *tagStore) insert(subscription TagSubscription, path *gnmiLib.Path, valu } // Add the values - for k, v := range values { - tagName := subscription.Name + "/" + filepath.Base(k) - sv, err := internal.ToString(v) + for _, f := range values { + tagName := subscription.Name + if len(f.path.segments) > 0 { + key := f.path.segments[len(f.path.segments)-1] + key = strings.ReplaceAll(key, "-", "_") + tagName += "/" + key + } + sv, err := internal.ToString(f.value) if err != nil { - return fmt.Errorf("conversion error for %v: %w", v, err) + return fmt.Errorf("conversion error for %v: %w", f.value, err) } if sv == "" { delete(s.names[key], tagName) @@ -92,11 +100,16 @@ func (s *tagStore) insert(subscription TagSubscription, path *gnmiLib.Path, valu } // Add the values - for k, v := range values { - tagName := subscription.Name + "/" + filepath.Base(k) - sv, err := internal.ToString(v) + for _, f := range values { + tagName := subscription.Name + if len(f.path.segments) > 0 { + key := f.path.segments[len(f.path.segments)-1] + key = strings.ReplaceAll(key, "-", "_") + tagName += "/" + key + } + sv, err := internal.ToString(f.value) if err != nil { - return fmt.Errorf("conversion error for %v: %w", v, err) + return fmt.Errorf("conversion error for %v: %w", f.value, err) } if sv == "" { delete(s.elements.tags[key], tagName) @@ -111,7 +124,7 @@ func (s *tagStore) insert(subscription TagSubscription, path *gnmiLib.Path, valu return nil } -func (s *tagStore) lookup(path *gnmiLib.Path, metricTags map[string]string) map[string]string { +func (s *tagStore) lookup(path *pathInfo, metricTags map[string]string) map[string]string { // Add all unconditional tags tags := make(map[string]string, len(s.unconditional)) for k, v := range s.unconditional { @@ -140,9 +153,7 @@ func (s *tagStore) lookup(path *gnmiLib.Path, metricTags map[string]string) map[ return tags } -func (s *tagStore) getElementsKeys(path *gnmiLib.Path, elements []string) (string, bool) { - keyElements := 
pathKeys(path) - +func (s *tagStore) getElementsKeys(path *pathInfo, elements []string) (string, bool) { // Search for the required path elements and collect a ordered // list of their values to in the form // elementName1={keyA=valueA,keyB=valueB,...},...,elementNameN={keyY=valueY,keyZ=valueZ} @@ -151,9 +162,9 @@ func (s *tagStore) getElementsKeys(path *gnmiLib.Path, elements []string) (strin for _, requiredElement := range elements { var found bool var elementKVs []string - for _, el := range keyElements { - if el.Name == requiredElement { - for k, v := range el.Key { + for _, segment := range path.keyValues { + if segment.name == requiredElement { + for k, v := range segment.kv { elementKVs = append(elementKVs, k+"="+v) } found = true diff --git a/plugins/inputs/gnmi/testcases/issue_14044/expected.out b/plugins/inputs/gnmi/testcases/issue_14044/expected.out new file mode 100644 index 0000000000000..ecb759c67ddd3 --- /dev/null +++ b/plugins/inputs/gnmi/testcases/issue_14044/expected.out @@ -0,0 +1 @@ +ifdesc,name=FourHundredGigE0/2/0/3,path=openconfig-interfaces:/interfaces/interface/state,source=127.0.0.1 description="REDACTED" 1696324083211000000 \ No newline at end of file diff --git a/plugins/inputs/gnmi/testcases/issue_14044/responses.json b/plugins/inputs/gnmi/testcases/issue_14044/responses.json new file mode 100644 index 0000000000000..67558673a7050 --- /dev/null +++ b/plugins/inputs/gnmi/testcases/issue_14044/responses.json @@ -0,0 +1,33 @@ +[ + { + "update": { + "timestamp": "1696324083211000000", + "prefix": { + "origin": "openconfig-interfaces" + }, + "update": [ + { + "path": { + "elem": [ + { + "name": "interfaces" + }, + { + "name": "interface", + "key": { + "name": "FourHundredGigE0/2/0/3" + } + }, + { + "name": "state" + } + ] + }, + "val": { + "json_ietf_val": "eyJkZXNjcmlwdGlvbiI6IlJFREFDVEVEIn0=" + } + } + ] + } + } +] \ No newline at end of file diff --git a/plugins/inputs/gnmi/testcases/issue_14044/telegraf.conf b/plugins/inputs/gnmi/testcases/issue_14044/telegraf.conf new file mode 100644 index 0000000000000..903520d9808b3 --- /dev/null +++ b/plugins/inputs/gnmi/testcases/issue_14044/telegraf.conf @@ -0,0 +1,12 @@ +[[inputs.gnmi]] + addresses = ["dummy"] + name_override = "gnmi" + redial = "10s" + encoding = "json_ietf" + guess_path_tag = true + [[inputs.gnmi.subscription]] + name = "ifdesc" + origin = "openconfig-interfaces" + path = '/interfaces/interface[name=FourHundredGigE*]/state/description' + subscription_mode = "sample" + sample_interval = "60s" diff --git a/plugins/inputs/gnmi/testcases/issue_14063/expected.out b/plugins/inputs/gnmi/testcases/issue_14063/expected.out new file mode 100644 index 0000000000000..911b2675de76b --- /dev/null +++ b/plugins/inputs/gnmi/testcases/issue_14063/expected.out @@ -0,0 +1 @@ +ifcounters,path=oc-if:/interfaces/oc-if:interface/oc-if:state/oc-if:counters,source=127.0.0.1 
in_1024_to_1518_octet_pkts=0u,in_128_to_255_octet_pkts=0u,in_1519_to_2047_octet_pkts=0u,in_2048_to_4095_octet_pkts=0u,in_256_to_511_octet_pkts=0u,in_4096_to_9216_octet_pkts=0u,in_512_to_1023_octet_pkts=0u,in_64_octet_pkts=0u,in_65_to_127_octet_pkts=0u,in_broadcast_pkts=0u,in_crc_error_pkts=0u,in_discards=0u,in_discards_octets=0u,in_dropped_octets=0u,in_dropped_pkts=0u,in_errors=0u,in_jabber_pkts=0u,in_multicast_pkts=0u,in_octets=0u,in_oversize_pkts=0u,in_pkts=0u,in_undersize_pkts=0u,in_unicast_pkts=0u,last_clear=1691859140059797458u,link_flap_events=0u,name="\\\"1\\",out_1519_to_2047_octet_pkts=0u,out_2048_to_4095_octet_pkts=0u,out_4096_to_9216_octet_pkts=0u,out_broadcast_pkts=0u,out_errors=0u,out_multicast_pkts=0u,out_octets=0u,out_pkts=0u,out_unicast_pkts=0u 1696617695101000000 diff --git a/plugins/inputs/gnmi/testcases/issue_14063/responses.json b/plugins/inputs/gnmi/testcases/issue_14063/responses.json new file mode 100644 index 0000000000000..f0784d196f56b --- /dev/null +++ b/plugins/inputs/gnmi/testcases/issue_14063/responses.json @@ -0,0 +1,445 @@ +[ + { + "update": { + "timestamp": "1696617695101000000", + "prefix": { + "elem": [ + { + "name": "oc-if:interfaces" + }, + { + "name": "oc-if:interface" + }, + { + "name": "oc-if:state" + }, + { + "name": "oc-if:counters" + } + ] + }, + "update": [ + { + "path": { + "elem": [ + { + "name": "in-1024-to-1518-octet-pkts" + } + ] + }, + "val": { + "uintVal": "0" + } + }, + { + "path": { + "elem": [ + { + "name": "in-128-to-255-octet-pkts" + } + ] + }, + "val": { + "uintVal": "0" + } + }, + { + "path": { + "elem": [ + { + "name": "in-1519-to-2047-octet-pkts" + } + ] + }, + "val": { + "uintVal": "0" + } + }, + { + "path": { + "elem": [ + { + "name": "in-2048-to-4095-octet-pkts" + } + ] + }, + "val": { + "uintVal": "0" + } + }, + { + "path": { + "elem": [ + { + "name": "in-256-to-511-octet-pkts" + } + ] + }, + "val": { + "uintVal": "0" + } + }, + { + "path": { + "elem": [ + { + "name": "in-4096-to-9216-octet-pkts" + } + ] + }, + "val": { + "uintVal": "0" + } + }, + { + "path": { + "elem": [ + { + "name": "in-512-to-1023-octet-pkts" + } + ] + }, + "val": { + "uintVal": "0" + } + }, + { + "path": { + "elem": [ + { + "name": "in-64-octet-pkts" + } + ] + }, + "val": { + "uintVal": "0" + } + }, + { + "path": { + "elem": [ + { + "name": "in-65-to-127-octet-pkts" + } + ] + }, + "val": { + "uintVal": "0" + } + }, + { + "path": { + "elem": [ + { + "name": "in-broadcast-pkts" + } + ] + }, + "val": { + "uintVal": "0" + } + }, + { + "path": { + "elem": [ + { + "name": "in-crc-error-pkts" + } + ] + }, + "val": { + "uintVal": "0" + } + }, + { + "path": { + "elem": [ + { + "name": "in-discards" + } + ] + }, + "val": { + "uintVal": "0" + } + }, + { + "path": { + "elem": [ + { + "name": "in-discards-octets" + } + ] + }, + "val": { + "uintVal": "0" + } + }, + { + "path": { + "elem": [ + { + "name": "in-dropped-octets" + } + ] + }, + "val": { + "uintVal": "0" + } + }, + { + "path": { + "elem": [ + { + "name": "in-dropped-pkts" + } + ] + }, + "val": { + "uintVal": "0" + } + }, + { + "path": { + "elem": [ + { + "name": "in-errors" + } + ] + }, + "val": { + "uintVal": "0" + } + }, + { + "path": { + "elem": [ + { + "name": "in-jabber-pkts" + } + ] + }, + "val": { + "uintVal": "0" + } + }, + { + "path": { + "elem": [ + { + "name": "in-multicast-pkts" + } + ] + }, + "val": { + "uintVal": "0" + } + }, + { + "path": { + "elem": [ + { + "name": "in-octets" + } + ] + }, + "val": { + "uintVal": "0" + } + }, + { + "path": { + "elem": [ + { + "name": "in-oversize-pkts" + } + 
] + }, + "val": { + "uintVal": "0" + } + }, + { + "path": { + "elem": [ + { + "name": "in-pkts" + } + ] + }, + "val": { + "uintVal": "0" + } + }, + { + "path": { + "elem": [ + { + "name": "in-undersize-pkts" + } + ] + }, + "val": { + "uintVal": "0" + } + }, + { + "path": { + "elem": [ + { + "name": "in-unicast-pkts" + } + ] + }, + "val": { + "uintVal": "0" + } + }, + { + "path": { + "elem": [ + { + "name": "last-clear" + } + ] + }, + "val": { + "uintVal": "1691859140059797458" + } + }, + { + "path": { + "elem": [ + { + "name": "link-flap-events" + } + ] + }, + "val": { + "uintVal": "0" + } + }, + { + "path": { + "elem": [ + { + "name": "name" + } + ] + }, + "val": { + "stringVal": "\\\"1\\" + } + }, + { + "path": { + "elem": [ + { + "name": "out-1519-to-2047-octet-pkts" + } + ] + }, + "val": { + "uintVal": "0" + } + }, + { + "path": { + "elem": [ + { + "name": "out-2048-to-4095-octet-pkts" + } + ] + }, + "val": { + "uintVal": "0" + } + }, + { + "path": { + "elem": [ + { + "name": "out-4096-to-9216-octet-pkts" + } + ] + }, + "val": { + "uintVal": "0" + } + }, + { + "path": { + "elem": [ + { + "name": "out-broadcast-pkts" + } + ] + }, + "val": { + "uintVal": "0" + } + }, + { + "path": { + "elem": [ + { + "name": "out-errors" + } + ] + }, + "val": { + "uintVal": "0" + } + }, + { + "path": { + "elem": [ + { + "name": "out-multicast-pkts" + } + ] + }, + "val": { + "uintVal": "0" + } + }, + { + "path": { + "elem": [ + { + "name": "out-octets" + } + ] + }, + "val": { + "uintVal": "0" + } + }, + { + "path": { + "elem": [ + { + "name": "out-pkts" + } + ] + }, + "val": { + "uintVal": "0" + } + }, + { + "path": { + "elem": [ + { + "name": "out-unicast-pkts" + } + ] + }, + "val": { + "uintVal": "0" + } + } + ] + } + } +] \ No newline at end of file diff --git a/plugins/inputs/gnmi/testcases/issue_14063/telegraf.conf b/plugins/inputs/gnmi/testcases/issue_14063/telegraf.conf new file mode 100644 index 0000000000000..5790be12bec36 --- /dev/null +++ b/plugins/inputs/gnmi/testcases/issue_14063/telegraf.conf @@ -0,0 +1,11 @@ +[[inputs.gnmi]] + addresses = ["dummy"] + name_override = "gnmi" + redial = "10s" + + [[inputs.gnmi.subscription]] + name = "ifcounters" + origin = "openconfig-interfaces" + path = "/oc-if:interfaces/oc-if:interface/oc-if:state/oc-if:counters" + subscription_mode = "sample" + sample_interval = "30s" \ No newline at end of file diff --git a/plugins/inputs/gnmi/update_fields.go b/plugins/inputs/gnmi/update_fields.go new file mode 100644 index 0000000000000..36e35ac33b176 --- /dev/null +++ b/plugins/inputs/gnmi/update_fields.go @@ -0,0 +1,92 @@ +package gnmi + +import ( + "encoding/json" + "fmt" + "strconv" + + gnmiLib "github.com/openconfig/gnmi/proto/gnmi" + gnmiValue "github.com/openconfig/gnmi/value" +) + +type updateField struct { + path *pathInfo + value interface{} +} + +func newFieldsFromUpdate(path *pathInfo, update *gnmiLib.Update) ([]updateField, error) { + if update.Val == nil || update.Val.Value == nil { + return []updateField{{path: path}}, nil + } + + // Apply some special handling for special types + switch v := update.Val.Value.(type) { + case *gnmiLib.TypedValue_AsciiVal: // not handled in ToScalar + return []updateField{{path, v.AsciiVal}}, nil + case *gnmiLib.TypedValue_JsonVal: // requires special path handling + return processJSON(path, v.JsonVal) + case *gnmiLib.TypedValue_JsonIetfVal: // requires special path handling + return processJSON(path, v.JsonIetfVal) + } + + // Convert the protobuf "oneof" data to a Golang type. 
+ value, err := gnmiValue.ToScalar(update.Val) + if err != nil { + return nil, err + } + return []updateField{{path, value}}, nil +} + +func processJSON(path *pathInfo, data []byte) ([]updateField, error) { + var nested interface{} + if err := json.Unmarshal(data, &nested); err != nil { + return nil, fmt.Errorf("failed to parse JSON value: %w", err) + } + + // Flatten the JSON data to get a key-value map + entries := flatten(nested) + + // Create an update-field with the complete path for all entries + fields := make([]updateField, 0, len(entries)) + for key, v := range entries { + fields = append(fields, updateField{ + path: path.appendSegments(key), + value: v, + }) + } + + return fields, nil +} + +func flatten(nested interface{}) map[string]interface{} { + fields := make(map[string]interface{}) + + switch n := nested.(type) { + case map[string]interface{}: + for k, child := range n { + for ck, cv := range flatten(child) { + key := k + if ck != "" { + key += "/" + ck + } + fields[key] = cv + } + } + case []interface{}: + for i, child := range n { + k := strconv.Itoa(i) + for ck, cv := range flatten(child) { + key := k + if ck != "" { + key += "/" + ck + } + fields[key] = cv + } + } + case nil: + return nil + default: + return map[string]interface{}{"": nested} + } + return fields +} diff --git a/plugins/inputs/gnmi/utils.go b/plugins/inputs/gnmi/utils.go deleted file mode 100644 index dd9e97f403772..0000000000000 --- a/plugins/inputs/gnmi/utils.go +++ /dev/null @@ -1,154 +0,0 @@ -package gnmi - -import ( - "bytes" - "encoding/json" - "fmt" - "math" - "strings" - - gnmiLib "github.com/openconfig/gnmi/proto/gnmi" - - jsonparser "github.com/influxdata/telegraf/plugins/parsers/json" -) - -// Parse path to path-buffer and tag-field -// -//nolint:revive //function-result-limit conditionally 4 return results allowed -func handlePath(gnmiPath *gnmiLib.Path, tags map[string]string, aliases map[string]string, prefix string) (origin, path, alias string, err error) { - builder := bytes.NewBufferString(prefix) - - // Some devices do report the origin in the first path element - // so try to find out if this is the case. 
- if gnmiPath.Origin == "" && len(gnmiPath.Elem) > 0 { - groups := originPattern.FindStringSubmatch(gnmiPath.Elem[0].Name) - if len(groups) == 2 { - gnmiPath.Origin = groups[1] - gnmiPath.Elem[0].Name = gnmiPath.Elem[0].Name[len(groups[1])+1:] - } - } - - // Prefix with origin - if len(gnmiPath.Origin) > 0 { - origin = gnmiPath.Origin + ":" - } - - // Parse generic keys from prefix - for _, elem := range gnmiPath.Elem { - if len(elem.Name) > 0 { - if _, err := builder.WriteRune('/'); err != nil { - return "", "", "", err - } - if _, err := builder.WriteString(elem.Name); err != nil { - return "", "", "", err - } - } - name := builder.String() - - if _, exists := aliases[origin+name]; exists { - alias = origin + name - } else if _, exists := aliases[name]; exists { - alias = name - } - - if tags != nil { - for key, val := range elem.Key { - key = strings.ReplaceAll(key, "-", "_") - - // Use short-form of key if possible - if _, exists := tags[key]; exists { - tags[name+"/"+key] = val - } else { - tags[key] = val - } - } - } - } - - return origin, builder.String(), alias, nil -} - -// equalPathNoKeys checks if two gNMI paths are equal, without keys -func equalPathNoKeys(a *gnmiLib.Path, b *gnmiLib.Path) bool { - if len(a.Elem) != len(b.Elem) { - return false - } - for i := range a.Elem { - if a.Elem[i].Name != b.Elem[i].Name { - return false - } - } - return true -} - -func pathKeys(gpath *gnmiLib.Path) []*gnmiLib.PathElem { - var newPath []*gnmiLib.PathElem - for _, elem := range gpath.Elem { - if elem.Key != nil { - newPath = append(newPath, elem) - } - } - return newPath -} - -func pathWithPrefix(prefix *gnmiLib.Path, gpath *gnmiLib.Path) *gnmiLib.Path { - if prefix == nil { - return gpath - } - fullPath := new(gnmiLib.Path) - fullPath.Origin = prefix.Origin - fullPath.Target = prefix.Target - fullPath.Elem = append(prefix.Elem, gpath.Elem...) 
- return fullPath -} - -func gnmiToFields(name string, updateVal *gnmiLib.TypedValue) (map[string]interface{}, error) { - var value interface{} - var jsondata []byte - - // Make sure a value is actually set - if updateVal == nil || updateVal.Value == nil { - return nil, nil - } - - switch val := updateVal.Value.(type) { - case *gnmiLib.TypedValue_AsciiVal: - value = val.AsciiVal - case *gnmiLib.TypedValue_BoolVal: - value = val.BoolVal - case *gnmiLib.TypedValue_BytesVal: - value = val.BytesVal - case *gnmiLib.TypedValue_DoubleVal: - value = val.DoubleVal - case *gnmiLib.TypedValue_DecimalVal: - //nolint:staticcheck // to maintain backward compatibility with older gnmi specs - value = float64(val.DecimalVal.Digits) / math.Pow(10, float64(val.DecimalVal.Precision)) - case *gnmiLib.TypedValue_FloatVal: - //nolint:staticcheck // to maintain backward compatibility with older gnmi specs - value = val.FloatVal - case *gnmiLib.TypedValue_IntVal: - value = val.IntVal - case *gnmiLib.TypedValue_StringVal: - value = val.StringVal - case *gnmiLib.TypedValue_UintVal: - value = val.UintVal - case *gnmiLib.TypedValue_JsonIetfVal: - jsondata = val.JsonIetfVal - case *gnmiLib.TypedValue_JsonVal: - jsondata = val.JsonVal - } - - fields := make(map[string]interface{}) - if value != nil { - fields[name] = value - } else if jsondata != nil { - if err := json.Unmarshal(jsondata, &value); err != nil { - return nil, fmt.Errorf("failed to parse JSON value: %w", err) - } - flattener := jsonparser.JSONFlattener{Fields: fields} - if err := flattener.FullFlattenJSON(name, value, true, true); err != nil { - return nil, fmt.Errorf("failed to flatten JSON: %w", err) - } - } - return fields, nil -} diff --git a/plugins/inputs/hddtemp/hddtemp_test.go b/plugins/inputs/hddtemp/hddtemp_test.go index 44be91bb28bf9..66ed52b585b14 100644 --- a/plugins/inputs/hddtemp/hddtemp_test.go +++ b/plugins/inputs/hddtemp/hddtemp_test.go @@ -43,7 +43,7 @@ func TestFetch(t *testing.T) { err := hddTemp.Gather(acc) require.NoError(t, err) - require.Equal(t, acc.NFields(), 2) + require.Equal(t, 2, acc.NFields()) var tests = []struct { fields map[string]interface{} diff --git a/plugins/inputs/http/http_test.go b/plugins/inputs/http/http_test.go index 4eff3eee390d0..94ebefd6ab0bb 100644 --- a/plugins/inputs/http/http_test.go +++ b/plugins/inputs/http/http_test.go @@ -61,7 +61,7 @@ func TestHTTPWithJSONFormat(t *testing.T) { var metric = acc.Metrics[0] require.Equal(t, metric.Measurement, metricName) require.Len(t, acc.Metrics[0].Fields, 1) - require.Equal(t, acc.Metrics[0].Fields["a"], 1.2) + require.Equal(t, 1.2, acc.Metrics[0].Fields["a"]) require.Equal(t, acc.Metrics[0].Tags["url"], address) } @@ -282,7 +282,7 @@ func TestBodyAndContentEncoding(t *testing.T) { Log: testutil.Logger{}, }, queryHandlerFunc: func(t *testing.T, w http.ResponseWriter, r *http.Request) { - require.Equal(t, r.Header.Get("Content-Encoding"), "gzip") + require.Equal(t, "gzip", r.Header.Get("Content-Encoding")) gr, err := gzip.NewReader(r.Body) require.NoError(t, err) diff --git a/plugins/inputs/internal/internal_test.go b/plugins/inputs/internal/internal_test.go index d5c3d79581c1d..b5f539ece6aa4 100644 --- a/plugins/inputs/internal/internal_test.go +++ b/plugins/inputs/internal/internal_test.go @@ -4,10 +4,10 @@ import ( "fmt" "testing" + "github.com/stretchr/testify/require" + "github.com/influxdata/telegraf/selfstat" "github.com/influxdata/telegraf/testutil" - - "github.com/stretchr/testify/require" ) func TestSelfPlugin(t *testing.T) { @@ -100,7 +100,7 @@ func 
TestGostats(t *testing.T) { } require.NotNil(t, metric) - require.Equal(t, metric.Measurement, "internal_gostats") + require.Equal(t, "internal_gostats", metric.Measurement) require.Len(t, metric.Tags, 1) require.Contains(t, metric.Tags, "go_version") diff --git a/plugins/inputs/kafka_consumer/kafka_consumer_test.go b/plugins/inputs/kafka_consumer/kafka_consumer_test.go index 3e40ad5ed8d6b..2498b35adf2e7 100644 --- a/plugins/inputs/kafka_consumer/kafka_consumer_test.go +++ b/plugins/inputs/kafka_consumer/kafka_consumer_test.go @@ -70,11 +70,11 @@ func TestInit(t *testing.T) { name: "default config", plugin: &KafkaConsumer{}, check: func(t *testing.T, plugin *KafkaConsumer) { - require.Equal(t, plugin.ConsumerGroup, defaultConsumerGroup) - require.Equal(t, plugin.MaxUndeliveredMessages, defaultMaxUndeliveredMessages) - require.Equal(t, plugin.config.ClientID, "Telegraf") - require.Equal(t, plugin.config.Consumer.Offsets.Initial, sarama.OffsetOldest) - require.Equal(t, plugin.config.Consumer.MaxProcessingTime, 100*time.Millisecond) + require.Equal(t, defaultConsumerGroup, plugin.ConsumerGroup) + require.Equal(t, defaultMaxUndeliveredMessages, plugin.MaxUndeliveredMessages) + require.Equal(t, "Telegraf", plugin.config.ClientID) + require.Equal(t, sarama.OffsetOldest, plugin.config.Consumer.Offsets.Initial) + require.Equal(t, 100*time.Millisecond, plugin.config.Consumer.MaxProcessingTime) }, }, { @@ -114,7 +114,7 @@ func TestInit(t *testing.T) { Log: testutil.Logger{}, }, check: func(t *testing.T, plugin *KafkaConsumer) { - require.Equal(t, plugin.config.ClientID, "custom") + require.Equal(t, "custom", plugin.config.ClientID) }, }, { @@ -124,7 +124,7 @@ func TestInit(t *testing.T) { Log: testutil.Logger{}, }, check: func(t *testing.T, plugin *KafkaConsumer) { - require.Equal(t, plugin.config.Consumer.Offsets.Initial, sarama.OffsetNewest) + require.Equal(t, sarama.OffsetNewest, plugin.config.Consumer.Offsets.Initial) }, }, { @@ -197,7 +197,7 @@ func TestInit(t *testing.T) { Log: testutil.Logger{}, }, check: func(t *testing.T, plugin *KafkaConsumer) { - require.Equal(t, plugin.config.Consumer.MaxProcessingTime, 1000*time.Millisecond) + require.Equal(t, 1000*time.Millisecond, plugin.config.Consumer.MaxProcessingTime) }, }, } diff --git a/plugins/inputs/kafka_consumer_legacy/kafka_consumer_legacy_test.go b/plugins/inputs/kafka_consumer_legacy/kafka_consumer_legacy_test.go index 740a9dced2974..1aeeefc4d7909 100644 --- a/plugins/inputs/kafka_consumer_legacy/kafka_consumer_legacy_test.go +++ b/plugins/inputs/kafka_consumer_legacy/kafka_consumer_legacy_test.go @@ -5,13 +5,12 @@ import ( "testing" "github.com/Shopify/sarama" + "github.com/stretchr/testify/require" "github.com/influxdata/telegraf/plugins/parsers/graphite" "github.com/influxdata/telegraf/plugins/parsers/influx" "github.com/influxdata/telegraf/plugins/parsers/json" "github.com/influxdata/telegraf/testutil" - - "github.com/stretchr/testify/require" ) const ( @@ -52,7 +51,7 @@ func TestRunParser(t *testing.T) { in <- saramaMsg(testMsg) acc.Wait(1) - require.Equal(t, acc.NFields(), 1) + require.Equal(t, 1, acc.NFields()) } // Test that the parser ignores invalid messages @@ -70,7 +69,7 @@ func TestRunParserInvalidMsg(t *testing.T) { in <- saramaMsg(invalidMsg) acc.WaitError(1) - require.Equal(t, acc.NFields(), 0) + require.Equal(t, 0, acc.NFields()) } // Test that overlong messages are dropped @@ -87,7 +86,7 @@ func TestDropOverlongMsg(t *testing.T) { in <- saramaMsg(overlongMsg) acc.WaitError(1) - require.Equal(t, acc.NFields(), 0) + 
require.Equal(t, 0, acc.NFields()) } // Test that the parser parses kafka messages into points @@ -107,7 +106,7 @@ func TestRunParserAndGather(t *testing.T) { require.NoError(t, acc.GatherError(k.Gather)) - require.Equal(t, acc.NFields(), 1) + require.Equal(t, 1, acc.NFields()) acc.AssertContainsFields(t, "cpu_load_short", map[string]interface{}{"value": float64(23422)}) } @@ -128,7 +127,7 @@ func TestRunParserAndGatherGraphite(t *testing.T) { require.NoError(t, acc.GatherError(k.Gather)) - require.Equal(t, acc.NFields(), 1) + require.Equal(t, 1, acc.NFields()) acc.AssertContainsFields(t, "cpu_load_short_graphite", map[string]interface{}{"value": float64(23422)}) } @@ -151,7 +150,7 @@ func TestRunParserAndGatherJSON(t *testing.T) { require.NoError(t, acc.GatherError(k.Gather)) - require.Equal(t, acc.NFields(), 2) + require.Equal(t, 2, acc.NFields()) acc.AssertContainsFields(t, "kafka_json_test", map[string]interface{}{ "a": float64(5), diff --git a/plugins/inputs/libvirt/libvirt_test.go b/plugins/inputs/libvirt/libvirt_test.go index a080db6248510..eb137a031b655 100644 --- a/plugins/inputs/libvirt/libvirt_test.go +++ b/plugins/inputs/libvirt/libvirt_test.go @@ -299,7 +299,7 @@ func TestLibvirt_calculateMetricNumber(t *testing.T) { } err := l.calculateMetricNumber() require.NoError(t, err) - require.Equal(t, l.metricNumber, domainStatsAll) + require.Equal(t, domainStatsAll, l.metricNumber) }) } diff --git a/plugins/inputs/logparser/logparser_test.go b/plugins/inputs/logparser/logparser_test.go index 5ce5cc8fa7872..858a7cecc240f 100644 --- a/plugins/inputs/logparser/logparser_test.go +++ b/plugins/inputs/logparser/logparser_test.go @@ -136,7 +136,7 @@ func TestGrokParseLogFilesAppearLater(t *testing.T) { acc := testutil.Accumulator{} require.NoError(t, logparser.Start(&acc)) - require.Equal(t, acc.NFields(), 0) + require.Equal(t, 0, acc.NFields()) input, err := os.ReadFile(filepath.Join(testdataDir, "test_a.log")) require.NoError(t, err) diff --git a/plugins/inputs/mesos/mesos_test.go b/plugins/inputs/mesos/mesos_test.go index 65f399a97c964..45a36cfb63bee 100644 --- a/plugins/inputs/mesos/mesos_test.go +++ b/plugins/inputs/mesos/mesos_test.go @@ -416,14 +416,14 @@ func TestWithPathDoesNotModify(t *testing.T) { u, err := url.Parse("http://localhost:5051") require.NoError(t, err) v := withPath(u, "/xyzzy") - require.Equal(t, u.String(), "http://localhost:5051") - require.Equal(t, v.String(), "http://localhost:5051/xyzzy") + require.Equal(t, "http://localhost:5051", u.String()) + require.Equal(t, "http://localhost:5051/xyzzy", v.String()) } func TestURLTagDoesNotModify(t *testing.T) { u, err := url.Parse("http://a:b@localhost:5051?timeout=1ms") require.NoError(t, err) v := urlTag(u) - require.Equal(t, u.String(), "http://a:b@localhost:5051?timeout=1ms") - require.Equal(t, v, "http://localhost:5051") + require.Equal(t, "http://a:b@localhost:5051?timeout=1ms", u.String()) + require.Equal(t, "http://localhost:5051", v) } diff --git a/plugins/inputs/modbus/modbus_test.go b/plugins/inputs/modbus/modbus_test.go index 00d715b34536a..419ba6795674a 100644 --- a/plugins/inputs/modbus/modbus_test.go +++ b/plugins/inputs/modbus/modbus_test.go @@ -293,7 +293,7 @@ func TestRetryFailIllegal(t *testing.T) { require.NoError(t, modbus.Gather(&acc)) require.Len(t, acc.Errors, 1) require.EqualError(t, acc.FirstError(), "slave 1: modbus: exception '1' (illegal function), function '129'") - require.Equal(t, counter, 1) + require.Equal(t, 1, counter) } func TestCases(t *testing.T) { diff --git 
a/plugins/inputs/mongodb/mongodb_data_test.go b/plugins/inputs/mongodb/mongodb_data_test.go index af267283621e3..e8dc0ce82d708 100644 --- a/plugins/inputs/mongodb/mongodb_data_test.go +++ b/plugins/inputs/mongodb/mongodb_data_test.go @@ -329,7 +329,7 @@ func TestAddShardHostStats(t *testing.T) { } sort.Strings(hostsFound) sort.Strings(expectedHosts) - require.Equal(t, hostsFound, expectedHosts) + require.Equal(t, expectedHosts, hostsFound) } func TestStateTag(t *testing.T) { diff --git a/plugins/inputs/mongodb/mongostat_test.go b/plugins/inputs/mongodb/mongostat_test.go index 47ba058847e07..3b8ded4ced4c8 100644 --- a/plugins/inputs/mongodb/mongostat_test.go +++ b/plugins/inputs/mongodb/mongostat_test.go @@ -53,12 +53,12 @@ func TestLatencyStats(t *testing.T) { 60, ) - require.Equal(t, sl.CommandLatency, int64(0)) - require.Equal(t, sl.ReadLatency, int64(0)) - require.Equal(t, sl.WriteLatency, int64(0)) - require.Equal(t, sl.CommandOpsCnt, int64(0)) - require.Equal(t, sl.ReadOpsCnt, int64(0)) - require.Equal(t, sl.WriteOpsCnt, int64(0)) + require.Equal(t, int64(0), sl.CommandLatency) + require.Equal(t, int64(0), sl.ReadLatency) + require.Equal(t, int64(0), sl.WriteLatency) + require.Equal(t, int64(0), sl.CommandOpsCnt) + require.Equal(t, int64(0), sl.ReadOpsCnt) + require.Equal(t, int64(0), sl.WriteOpsCnt) } func TestLatencyStatsDiffZero(t *testing.T) { @@ -122,12 +122,12 @@ func TestLatencyStatsDiffZero(t *testing.T) { 60, ) - require.Equal(t, sl.CommandLatency, int64(0)) - require.Equal(t, sl.ReadLatency, int64(0)) - require.Equal(t, sl.WriteLatency, int64(0)) - require.Equal(t, sl.CommandOpsCnt, int64(0)) - require.Equal(t, sl.ReadOpsCnt, int64(0)) - require.Equal(t, sl.WriteOpsCnt, int64(0)) + require.Equal(t, int64(0), sl.CommandLatency) + require.Equal(t, int64(0), sl.ReadLatency) + require.Equal(t, int64(0), sl.WriteLatency) + require.Equal(t, int64(0), sl.CommandOpsCnt) + require.Equal(t, int64(0), sl.ReadOpsCnt) + require.Equal(t, int64(0), sl.WriteOpsCnt) } func TestLatencyStatsDiff(t *testing.T) { @@ -191,12 +191,12 @@ func TestLatencyStatsDiff(t *testing.T) { 60, ) - require.Equal(t, sl.CommandLatency, int64(59177981552)) - require.Equal(t, sl.ReadLatency, int64(2255946760057)) - require.Equal(t, sl.WriteLatency, int64(494479456987)) - require.Equal(t, sl.CommandOpsCnt, int64(1019152861)) - require.Equal(t, sl.ReadOpsCnt, int64(4189049884)) - require.Equal(t, sl.WriteOpsCnt, int64(1691021287)) + require.Equal(t, int64(59177981552), sl.CommandLatency) + require.Equal(t, int64(2255946760057), sl.ReadLatency) + require.Equal(t, int64(494479456987), sl.WriteLatency) + require.Equal(t, int64(1019152861), sl.CommandOpsCnt) + require.Equal(t, int64(4189049884), sl.ReadOpsCnt) + require.Equal(t, int64(1691021287), sl.WriteOpsCnt) } func TestLocksStatsNilWhenLocksMissingInOldStat(t *testing.T) { diff --git a/plugins/inputs/monit/monit_test.go b/plugins/inputs/monit/monit_test.go index 9ec38d7640b24..7538a997e3b8f 100644 --- a/plugins/inputs/monit/monit_test.go +++ b/plugins/inputs/monit/monit_test.go @@ -591,7 +591,7 @@ func TestInvalidUsernameOrPassword(t *testing.T) { return } - require.Equal(t, r.URL.Path, "/_status", "Cannot handle request") + require.Equal(t, "/_status", r.URL.Path, "Cannot handle request") http.ServeFile(w, r, "testdata/response_servicetype_0.xml") })) @@ -618,7 +618,7 @@ func TestNoUsernameOrPasswordConfiguration(t *testing.T) { return } - require.Equal(t, r.URL.Path, "/_status", "Cannot handle request") + require.Equal(t, "/_status", r.URL.Path, "Cannot handle 
request") http.ServeFile(w, r, "testdata/response_servicetype_0.xml") })) diff --git a/plugins/inputs/mqtt_consumer/mqtt_consumer_test.go b/plugins/inputs/mqtt_consumer/mqtt_consumer_test.go index 857bcad465922..f01cf986832ca 100644 --- a/plugins/inputs/mqtt_consumer/mqtt_consumer_test.go +++ b/plugins/inputs/mqtt_consumer/mqtt_consumer_test.go @@ -6,10 +6,11 @@ import ( "time" mqtt "github.com/eclipse/paho.mqtt.golang" + "github.com/stretchr/testify/require" + "github.com/influxdata/telegraf" "github.com/influxdata/telegraf/plugins/parsers/influx" "github.com/influxdata/telegraf/testutil" - "github.com/stretchr/testify/require" ) type FakeClient struct { @@ -527,7 +528,7 @@ func TestAddRouteCalledForEachTopic(t *testing.T) { plugin.Stop() - require.Equal(t, client.addRouteCallCount, 2) + require.Equal(t, 2, client.addRouteCallCount) } func TestSubscribeCalledIfNoSession(t *testing.T) { @@ -558,7 +559,7 @@ func TestSubscribeCalledIfNoSession(t *testing.T) { plugin.Stop() - require.Equal(t, client.subscribeCallCount, 1) + require.Equal(t, 1, client.subscribeCallCount) } func TestSubscribeNotCalledIfSession(t *testing.T) { @@ -589,5 +590,5 @@ func TestSubscribeNotCalledIfSession(t *testing.T) { plugin.Stop() - require.Equal(t, client.subscribeCallCount, 0) + require.Equal(t, 0, client.subscribeCallCount) } diff --git a/plugins/inputs/nats/nats_test.go b/plugins/inputs/nats/nats_test.go index 906bf14e90ef5..588a958e882c6 100644 --- a/plugins/inputs/nats/nats_test.go +++ b/plugins/inputs/nats/nats_test.go @@ -8,8 +8,9 @@ import ( "net/http/httptest" "testing" - "github.com/influxdata/telegraf/testutil" "github.com/stretchr/testify/require" + + "github.com/influxdata/telegraf/testutil" ) var sampleVarz = ` @@ -70,7 +71,7 @@ func TestMetricsCorrect(t *testing.T) { var acc testutil.Accumulator srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - require.Equal(t, r.URL.Path, "/varz", "Cannot handle request") + require.Equal(t, "/varz", r.URL.Path, "Cannot handle request") rsp := sampleVarz _, err := fmt.Fprintln(w, rsp) diff --git a/plugins/inputs/neptune_apex/neptune_apex_test.go b/plugins/inputs/neptune_apex/neptune_apex_test.go index 29a1c6473e2ec..b77e8ac1edb12 100644 --- a/plugins/inputs/neptune_apex/neptune_apex_test.go +++ b/plugins/inputs/neptune_apex/neptune_apex_test.go @@ -370,7 +370,7 @@ func TestParseXML(t *testing.T) { } // No error case require.NoErrorf(t, err, "expected no error but got: %v", err) - require.Equalf(t, len(acc.Errors) > 0, test.wantAccErr, + require.Equalf(t, test.wantAccErr, len(acc.Errors) > 0, "Accumulator errors. 
got=%v, want=%t", acc.Errors, test.wantAccErr) testutil.RequireMetricsEqual(t, acc.GetTelegrafMetrics(), test.wantMetrics) @@ -511,7 +511,7 @@ func TestFindProbe(t *testing.T) { t.Run(test.name, func(t *testing.T) { t.Parallel() index := findProbe(test.probeName, fakeProbes) - require.Equalf(t, index, test.wantIndex, "probe index mismatch; got=%d, want %d", index, test.wantIndex) + require.Equalf(t, test.wantIndex, index, "probe index mismatch; got=%d, want %d", index, test.wantIndex) }) } } diff --git a/plugins/inputs/nginx_plus/nginx_plus_test.go b/plugins/inputs/nginx_plus/nginx_plus_test.go index 24aa708d567d1..cac4676b0dffd 100644 --- a/plugins/inputs/nginx_plus/nginx_plus_test.go +++ b/plugins/inputs/nginx_plus/nginx_plus_test.go @@ -253,7 +253,7 @@ func TestNginxPlusGeneratesMetrics(t *testing.T) { ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { var rsp string - require.Equal(t, r.URL.Path, "/status", "Cannot handle request") + require.Equal(t, "/status", r.URL.Path, "Cannot handle request") rsp = sampleStatusResponse w.Header()["Content-Type"] = []string{"application/json"} diff --git a/plugins/inputs/nginx_sts/nginx_sts_test.go b/plugins/inputs/nginx_sts/nginx_sts_test.go index 9ebb5f91ad9d8..121036167696a 100644 --- a/plugins/inputs/nginx_sts/nginx_sts_test.go +++ b/plugins/inputs/nginx_sts/nginx_sts_test.go @@ -8,8 +8,9 @@ import ( "net/url" "testing" - "github.com/influxdata/telegraf/testutil" "github.com/stretchr/testify/require" + + "github.com/influxdata/telegraf/testutil" ) const sampleStatusResponse = ` @@ -166,7 +167,7 @@ func TestNginxPlusGeneratesMetrics(t *testing.T) { ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { var rsp string - require.Equal(t, r.URL.Path, "/status", "Cannot handle request") + require.Equal(t, "/status", r.URL.Path, "Cannot handle request") rsp = sampleStatusResponse w.Header()["Content-Type"] = []string{"application/json"} diff --git a/plugins/inputs/nginx_upstream_check/nginx_upstream_check_test.go b/plugins/inputs/nginx_upstream_check/nginx_upstream_check_test.go index 353619b362228..e12bb1e2ccbe3 100644 --- a/plugins/inputs/nginx_upstream_check/nginx_upstream_check_test.go +++ b/plugins/inputs/nginx_upstream_check/nginx_upstream_check_test.go @@ -6,8 +6,9 @@ import ( "net/http/httptest" "testing" - "github.com/influxdata/telegraf/testutil" "github.com/stretchr/testify/require" + + "github.com/influxdata/telegraf/testutil" ) const sampleStatusResponse = ` @@ -45,7 +46,7 @@ func TestNginxUpstreamCheckData(test *testing.T) { testServer := httptest.NewServer(http.HandlerFunc(func(responseWriter http.ResponseWriter, request *http.Request) { var response string - require.Equal(test, request.URL.Path, "/status", "Cannot handle request") + require.Equal(test, "/status", request.URL.Path, "Cannot handle request") response = sampleStatusResponse responseWriter.Header()["Content-Type"] = []string{"application/json"} @@ -102,7 +103,7 @@ func TestNginxUpstreamCheckRequest(test *testing.T) { testServer := httptest.NewServer(http.HandlerFunc(func(responseWriter http.ResponseWriter, request *http.Request) { var response string - require.Equal(test, request.URL.Path, "/status", "Cannot handle request") + require.Equal(test, "/status", request.URL.Path, "Cannot handle request") response = sampleStatusResponse responseWriter.Header()["Content-Type"] = []string{"application/json"} @@ -110,10 +111,10 @@ func TestNginxUpstreamCheckRequest(test *testing.T) { _, err := 
fmt.Fprintln(responseWriter, response) require.NoError(test, err) - require.Equal(test, request.Method, "POST") - require.Equal(test, request.Header.Get("X-Test"), "test-value") - require.Equal(test, request.Header.Get("Authorization"), "Basic dXNlcjpwYXNzd29yZA==") - require.Equal(test, request.Host, "status.local") + require.Equal(test, "POST", request.Method) + require.Equal(test, "test-value", request.Header.Get("X-Test")) + require.Equal(test, "Basic dXNlcjpwYXNzd29yZA==", request.Header.Get("Authorization")) + require.Equal(test, "status.local", request.Host) })) defer testServer.Close() diff --git a/plugins/inputs/nginx_vts/nginx_vts_test.go b/plugins/inputs/nginx_vts/nginx_vts_test.go index 589bc634f9358..cb4567947d47e 100644 --- a/plugins/inputs/nginx_vts/nginx_vts_test.go +++ b/plugins/inputs/nginx_vts/nginx_vts_test.go @@ -8,8 +8,9 @@ import ( "net/url" "testing" - "github.com/influxdata/telegraf/testutil" "github.com/stretchr/testify/require" + + "github.com/influxdata/telegraf/testutil" ) const sampleStatusResponse = ` @@ -203,7 +204,7 @@ func TestNginxPlusGeneratesMetrics(t *testing.T) { ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { var rsp string - require.Equal(t, r.URL.Path, "/status", "Cannot handle request") + require.Equal(t, "/status", r.URL.Path, "Cannot handle request") rsp = sampleStatusResponse w.Header()["Content-Type"] = []string{"application/json"} diff --git a/plugins/inputs/openntpd/openntpd_test.go b/plugins/inputs/openntpd/openntpd_test.go index ffca02b31a908..88bb206480199 100644 --- a/plugins/inputs/openntpd/openntpd_test.go +++ b/plugins/inputs/openntpd/openntpd_test.go @@ -25,9 +25,9 @@ func TestParseSimpleOutput(t *testing.T) { require.NoError(t, err) require.True(t, acc.HasMeasurement("openntpd")) - require.Equal(t, acc.NMetrics(), uint64(1)) + require.Equal(t, uint64(1), acc.NMetrics()) - require.Equal(t, acc.NFields(), 7) + require.Equal(t, 7, acc.NFields()) firstpeerfields := map[string]interface{}{ "wt": int64(1), @@ -56,9 +56,9 @@ func TestParseSimpleOutputwithStatePrefix(t *testing.T) { require.NoError(t, err) require.True(t, acc.HasMeasurement("openntpd")) - require.Equal(t, acc.NMetrics(), uint64(1)) + require.Equal(t, uint64(1), acc.NMetrics()) - require.Equal(t, acc.NFields(), 7) + require.Equal(t, 7, acc.NFields()) firstpeerfields := map[string]interface{}{ "wt": int64(1), @@ -88,9 +88,9 @@ func TestParseSimpleOutputInvalidPeer(t *testing.T) { require.NoError(t, err) require.True(t, acc.HasMeasurement("openntpd")) - require.Equal(t, acc.NMetrics(), uint64(1)) + require.Equal(t, uint64(1), acc.NMetrics()) - require.Equal(t, acc.NFields(), 4) + require.Equal(t, 4, acc.NFields()) firstpeerfields := map[string]interface{}{ "wt": int64(1), @@ -116,9 +116,9 @@ func TestParseSimpleOutputServersDNSError(t *testing.T) { require.NoError(t, err) require.True(t, acc.HasMeasurement("openntpd")) - require.Equal(t, acc.NMetrics(), uint64(1)) + require.Equal(t, uint64(1), acc.NMetrics()) - require.Equal(t, acc.NFields(), 4) + require.Equal(t, 4, acc.NFields()) firstpeerfields := map[string]interface{}{ "next": int64(2), @@ -158,9 +158,9 @@ func TestParseSimpleOutputServerDNSError(t *testing.T) { require.NoError(t, err) require.True(t, acc.HasMeasurement("openntpd")) - require.Equal(t, acc.NMetrics(), uint64(1)) + require.Equal(t, uint64(1), acc.NMetrics()) - require.Equal(t, acc.NFields(), 4) + require.Equal(t, 4, acc.NFields()) firstpeerfields := map[string]interface{}{ "next": int64(12), @@ -186,9 +186,9 @@ func 
TestParseFullOutput(t *testing.T) { require.NoError(t, err) require.True(t, acc.HasMeasurement("openntpd")) - require.Equal(t, acc.NMetrics(), uint64(20)) + require.Equal(t, uint64(20), acc.NMetrics()) - require.Equal(t, acc.NFields(), 113) + require.Equal(t, 113, acc.NFields()) firstpeerfields := map[string]interface{}{ "wt": int64(1), diff --git a/plugins/inputs/opensmtpd/opensmtpd_test.go b/plugins/inputs/opensmtpd/opensmtpd_test.go index 3b625be51cef2..47e536c728cb9 100644 --- a/plugins/inputs/opensmtpd/opensmtpd_test.go +++ b/plugins/inputs/opensmtpd/opensmtpd_test.go @@ -25,9 +25,9 @@ func TestFilterSomeStats(t *testing.T) { require.NoError(t, err) require.True(t, acc.HasMeasurement("opensmtpd")) - require.Equal(t, acc.NMetrics(), uint64(1)) + require.Equal(t, uint64(1), acc.NMetrics()) - require.Equal(t, acc.NFields(), 36) + require.Equal(t, 36, acc.NFields()) acc.AssertContainsFields(t, "opensmtpd", parsedFullOutput) } diff --git a/plugins/inputs/p4runtime/p4runtime_test.go b/plugins/inputs/p4runtime/p4runtime_test.go index fe1c3f89dfc92..3bb297e38ca4a 100644 --- a/plugins/inputs/p4runtime/p4runtime_test.go +++ b/plugins/inputs/p4runtime/p4runtime_test.go @@ -641,9 +641,8 @@ func TestFilterCounterNamesInclude(t *testing.T) { filteredCounters := filterCounters(counters, counterNamesInclude) require.Equal( t, - filteredCounters, []*p4ConfigV1.Counter{ createCounter("bar", 2, p4ConfigV1.CounterSpec_BOTH), - }, + }, filteredCounters, ) } diff --git a/plugins/inputs/ping/ping_test.go b/plugins/inputs/ping/ping_test.go index 46e1db3f657c9..205420e22519a 100644 --- a/plugins/inputs/ping/ping_test.go +++ b/plugins/inputs/ping/ping_test.go @@ -222,7 +222,7 @@ func TestArguments(t *testing.T) { for _, system := range []string{"darwin", "linux", "anything else"} { actual := p.args("www.google.com", system) - require.Equal(t, actual, expected) + require.Equal(t, expected, actual) } } @@ -405,7 +405,7 @@ func TestPingBinary(t *testing.T) { Urls: []string{"www.google.com"}, Binary: "ping6", pingHost: func(binary string, timeout float64, args ...string) (string, error) { - require.Equal(t, binary, "ping6") + require.Equal(t, "ping6", binary) return "", nil }, } diff --git a/plugins/inputs/procstat/README.md b/plugins/inputs/procstat/README.md index 83719fb8c2e74..ae15e497a1d70 100644 --- a/plugins/inputs/procstat/README.md +++ b/plugins/inputs/procstat/README.md @@ -115,14 +115,7 @@ Below are an example set of tags and fields: - child_minor_faults (int) - created_at (int) [epoch in nanoseconds] - cpu_time (int) - - cpu_time_guest (float) - - cpu_time_guest_nice (float) - - cpu_time_idle (float) - - cpu_time_iowait (float) - - cpu_time_irq (float) - - cpu_time_nice (float) - - cpu_time_soft_irq (float) - - cpu_time_steal (float) + - cpu_time_iowait (float) (zero for all OSes except Linux) - cpu_time_system (float) - cpu_time_user (float) - cpu_usage (float) @@ -199,5 +192,5 @@ Below are an example set of tags and fields: ```text procstat_lookup,host=prash-laptop,pattern=influxd,pid_finder=pgrep,result=success pid_count=1i,running=1i,result_code=0i 1582089700000000000 -procstat,host=prash-laptop,pattern=influxd,process_name=influxd,user=root 
involuntary_context_switches=151496i,child_minor_faults=1061i,child_major_faults=8i,cpu_time_user=2564.81,cpu_time_idle=0,cpu_time_irq=0,cpu_time_guest=0,pid=32025i,major_faults=8609i,created_at=1580107536000000000i,voluntary_context_switches=1058996i,cpu_time_system=616.98,cpu_time_steal=0,cpu_time_guest_nice=0,memory_swap=0i,memory_locked=0i,memory_usage=1.7797634601593018,num_threads=18i,cpu_time_nice=0,cpu_time_iowait=0,cpu_time_soft_irq=0,memory_rss=148643840i,memory_vms=1435688960i,memory_data=0i,memory_stack=0i,minor_faults=1856550i 1582089700000000000 +procstat,host=prash-laptop,pattern=influxd,process_name=influxd,user=root involuntary_context_switches=151496i,child_minor_faults=1061i,child_major_faults=8i,cpu_time_user=2564.81,pid=32025i,major_faults=8609i,created_at=1580107536000000000i,voluntary_context_switches=1058996i,cpu_time_system=616.98,memory_swap=0i,memory_locked=0i,memory_usage=1.7797634601593018,num_threads=18i,cpu_time_iowait=0,memory_rss=148643840i,memory_vms=1435688960i,memory_data=0i,memory_stack=0i,minor_faults=1856550i 1582089700000000000 ``` diff --git a/plugins/inputs/procstat/native_finder.go b/plugins/inputs/procstat/native_finder.go index 134336ee6cf85..13fea9fe3bb1f 100644 --- a/plugins/inputs/procstat/native_finder.go +++ b/plugins/inputs/procstat/native_finder.go @@ -45,7 +45,7 @@ func (pg *NativeFinder) PidFile(path string) ([]PID, error) { var pids []PID pidString, err := os.ReadFile(path) if err != nil { - return pids, fmt.Errorf("Failed to read pidfile %q: %w", path, err) + return pids, fmt.Errorf("failed to read pidfile %q: %w", path, err) } pid, err := strconv.ParseInt(strings.TrimSpace(string(pidString)), 10, 32) if err != nil { diff --git a/plugins/inputs/procstat/native_finder_windows_test.go b/plugins/inputs/procstat/native_finder_windows_test.go index 2a90344fa6761..337188921cb2d 100644 --- a/plugins/inputs/procstat/native_finder_windows_test.go +++ b/plugins/inputs/procstat/native_finder_windows_test.go @@ -17,7 +17,7 @@ func TestGather_RealPatternIntegration(t *testing.T) { pids, err := pg.Pattern(`procstat`) require.NoError(t, err) fmt.Println(pids) - require.Equal(t, len(pids) > 0, true) + require.Equal(t, true, len(pids) > 0) } func TestGather_RealFullPatternIntegration(t *testing.T) { @@ -29,7 +29,7 @@ func TestGather_RealFullPatternIntegration(t *testing.T) { pids, err := pg.FullPattern(`%procstat%`) require.NoError(t, err) fmt.Println(pids) - require.Equal(t, len(pids) > 0, true) + require.Equal(t, true, len(pids) > 0) } func TestGather_RealUserIntegration(t *testing.T) { @@ -43,5 +43,5 @@ func TestGather_RealUserIntegration(t *testing.T) { pids, err := pg.UID(currentUser.Username) require.NoError(t, err) fmt.Println(pids) - require.Equal(t, len(pids) > 0, true) + require.Equal(t, true, len(pids) > 0) } diff --git a/plugins/inputs/procstat/procstat.go b/plugins/inputs/procstat/procstat.go index eb475c7e4c528..51fe845069e47 100644 --- a/plugins/inputs/procstat/procstat.go +++ b/plugins/inputs/procstat/procstat.go @@ -197,23 +197,16 @@ func (p *Procstat) addMetric(proc Process, acc telegraf.Accumulator, t time.Time fields[prefix+"write_bytes"] = io.WriteBytes } - createdAt, err := proc.CreateTime() //Returns epoch in ms + createdAt, err := proc.CreateTime() // returns epoch in ms if err == nil { - fields[prefix+"created_at"] = createdAt * 1000000 //Convert ms to ns + fields[prefix+"created_at"] = createdAt * 1000000 // ms to ns } cpuTime, err := proc.Times() if err == nil { fields[prefix+"cpu_time_user"] = cpuTime.User 
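// Only User, System and (on Linux) Iowait are populated per process by the
// underlying library; the remaining TimesStat counters always read zero,
// hence their removal below.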
fields[prefix+"cpu_time_system"] = cpuTime.System - fields[prefix+"cpu_time_idle"] = cpuTime.Idle - fields[prefix+"cpu_time_nice"] = cpuTime.Nice - fields[prefix+"cpu_time_iowait"] = cpuTime.Iowait - fields[prefix+"cpu_time_irq"] = cpuTime.Irq - fields[prefix+"cpu_time_soft_irq"] = cpuTime.Softirq - fields[prefix+"cpu_time_steal"] = cpuTime.Steal - fields[prefix+"cpu_time_guest"] = cpuTime.Guest - fields[prefix+"cpu_time_guest_nice"] = cpuTime.GuestNice + fields[prefix+"cpu_time_iowait"] = cpuTime.Iowait // only reported on Linux } cpuPerc, err := proc.Percent(time.Duration(0)) diff --git a/plugins/inputs/prometheus/prometheus_test.go b/plugins/inputs/prometheus/prometheus_test.go index b3f132903fefb..2b1c372528115 100644 --- a/plugins/inputs/prometheus/prometheus_test.go +++ b/plugins/inputs/prometheus/prometheus_test.go @@ -364,7 +364,7 @@ func TestPrometheusGeneratesSummaryMetricsV2(t *testing.T) { err = acc.GatherError(p.Gather) require.NoError(t, err) - require.Equal(t, acc.TagSetValue("prometheus", "quantile"), "0") + require.Equal(t, "0", acc.TagSetValue("prometheus", "quantile")) require.True(t, acc.HasFloatField("prometheus", "go_gc_duration_seconds_sum")) require.True(t, acc.HasFloatField("prometheus", "go_gc_duration_seconds_count")) require.Equal(t, acc.TagValue("prometheus", "url"), ts.URL+"/metrics") diff --git a/plugins/inputs/proxmox/proxmox_test.go b/plugins/inputs/proxmox/proxmox_test.go index fc7eb2d83724a..9e96c87255f7d 100644 --- a/plugins/inputs/proxmox/proxmox_test.go +++ b/plugins/inputs/proxmox/proxmox_test.go @@ -65,7 +65,7 @@ func TestGetNodeSearchDomain(t *testing.T) { err := getNodeSearchDomain(px) require.NoError(t, err) - require.Equal(t, px.nodeSearchDomain, "test.example.com") + require.Equal(t, "test.example.com", px.nodeSearchDomain) } func TestGatherLxcData(t *testing.T) { @@ -75,7 +75,7 @@ func TestGatherLxcData(t *testing.T) { acc := &testutil.Accumulator{} gatherLxcData(px, acc) - require.Equal(t, acc.NFields(), 15) + require.Equal(t, 15, acc.NFields()) testFields := map[string]interface{}{ "status": "running", "uptime": int64(2078164), @@ -109,7 +109,7 @@ func TestGatherQemuData(t *testing.T) { acc := &testutil.Accumulator{} gatherQemuData(px, acc) - require.Equal(t, acc.NFields(), 15) + require.Equal(t, 15, acc.NFields()) testFields := map[string]interface{}{ "status": "running", "uptime": int64(2159739), @@ -145,5 +145,5 @@ func TestGather(t *testing.T) { require.NoError(t, err) // Results from both tests above - require.Equal(t, acc.NFields(), 30) + require.Equal(t, 30, acc.NFields()) } diff --git a/plugins/inputs/raindrops/raindrops_test.go b/plugins/inputs/raindrops/raindrops_test.go index 6da64dbb4d207..387c2defc24a7 100644 --- a/plugins/inputs/raindrops/raindrops_test.go +++ b/plugins/inputs/raindrops/raindrops_test.go @@ -49,7 +49,7 @@ func TestRaindropsGeneratesMetrics(t *testing.T) { ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { var rsp string - require.Equal(t, r.URL.Path, "/_raindrops", "Cannot handle request") + require.Equal(t, "/_raindrops", r.URL.Path, "Cannot handle request") rsp = sampleResponse _, err := fmt.Fprintln(w, rsp) diff --git a/plugins/inputs/snmp/gosmi_test.go b/plugins/inputs/snmp/gosmi_test.go index fb56232695c29..57ff77b15e679 100644 --- a/plugins/inputs/snmp/gosmi_test.go +++ b/plugins/inputs/snmp/gosmi_test.go @@ -267,7 +267,7 @@ func TestTableBuild_walk_noTranslate(t *testing.T) { tb, err := tbl.Build(gosmiTsc, true, tr) require.NoError(t, err) - require.Equal(t, tb.Name, 
"mytable") + require.Equal(t, "mytable", tb.Name) rtr1 := RTableRow{ Tags: map[string]string{ "myfield1": "foo", @@ -350,7 +350,7 @@ func TestTableBuild_walk_Translate(t *testing.T) { tb, err := tbl.Build(gosmiTsc, true, tr) require.NoError(t, err) - require.Equal(t, tb.Name, "atTable") + require.Equal(t, "atTable", tb.Name) rtr1 := RTableRow{ Tags: map[string]string{ @@ -698,7 +698,7 @@ func TestTableJoin_walkGosmi(t *testing.T) { tb, err := tbl.Build(gosmiTsc, true, tr) require.NoError(t, err) - require.Equal(t, tb.Name, "mytable") + require.Equal(t, "mytable", tb.Name) rtr1 := RTableRow{ Tags: map[string]string{ "myfield1": "instance", @@ -781,7 +781,7 @@ func TestTableOuterJoin_walkGosmi(t *testing.T) { tb, err := tbl.Build(gosmiTsc, true, tr) require.NoError(t, err) - require.Equal(t, tb.Name, "mytable") + require.Equal(t, "mytable", tb.Name) rtr1 := RTableRow{ Tags: map[string]string{ "myfield1": "instance", @@ -873,7 +873,7 @@ func TestTableJoinNoIndexAsTag_walkGosmi(t *testing.T) { tb, err := tbl.Build(gosmiTsc, true, tr) require.NoError(t, err) - require.Equal(t, tb.Name, "mytable") + require.Equal(t, "mytable", tb.Name) rtr1 := RTableRow{ Tags: map[string]string{ "myfield1": "instance", diff --git a/plugins/inputs/snmp/snmp_test.go b/plugins/inputs/snmp/snmp_test.go index 326a9d21742ed..44aec0ae99c8b 100644 --- a/plugins/inputs/snmp/snmp_test.go +++ b/plugins/inputs/snmp/snmp_test.go @@ -332,7 +332,7 @@ func TestGetSNMPConnection_v3(t *testing.T) { gsc, err := s.getConnection(0) require.NoError(t, err) gs := gsc.(snmp.GosnmpWrapper) - require.Equal(t, gs.Version, gosnmp.Version3) + require.Equal(t, gosnmp.Version3, gs.Version) sp := gs.SecurityParameters.(*gosnmp.UsmSecurityParameters) require.Equal(t, "1.2.3.4", gsc.Host()) require.EqualValues(t, 20, gs.MaxRepetitions) @@ -453,7 +453,7 @@ func TestGetSNMPConnection_v3_blumenthal(t *testing.T) { gsc, err := s.getConnection(0) require.NoError(t, err) gs := gsc.(snmp.GosnmpWrapper) - require.Equal(t, gs.Version, gosnmp.Version3) + require.Equal(t, gosnmp.Version3, gs.Version) sp := gs.SecurityParameters.(*gosnmp.UsmSecurityParameters) require.Equal(t, "1.2.3.4", gsc.Host()) require.EqualValues(t, 20, gs.MaxRepetitions) @@ -644,7 +644,7 @@ func TestTableBuild_walk(t *testing.T) { tb, err := tbl.Build(tsc, true, NewNetsnmpTranslator()) require.NoError(t, err) - require.Equal(t, tb.Name, "mytable") + require.Equal(t, "mytable", tb.Name) rtr1 := RTableRow{ Tags: map[string]string{ "myfield1": "foo", @@ -988,7 +988,7 @@ func TestTableJoin_walk(t *testing.T) { tb, err := tbl.Build(tsc, true, NewNetsnmpTranslator()) require.NoError(t, err) - require.Equal(t, tb.Name, "mytable") + require.Equal(t, "mytable", tb.Name) rtr1 := RTableRow{ Tags: map[string]string{ "myfield1": "instance", @@ -1065,7 +1065,7 @@ func TestTableOuterJoin_walk(t *testing.T) { tb, err := tbl.Build(tsc, true, NewNetsnmpTranslator()) require.NoError(t, err) - require.Equal(t, tb.Name, "mytable") + require.Equal(t, "mytable", tb.Name) rtr1 := RTableRow{ Tags: map[string]string{ "myfield1": "instance", @@ -1151,7 +1151,7 @@ func TestTableJoinNoIndexAsTag_walk(t *testing.T) { tb, err := tbl.Build(tsc, true, NewNetsnmpTranslator()) require.NoError(t, err) - require.Equal(t, tb.Name, "mytable") + require.Equal(t, "mytable", tb.Name) rtr1 := RTableRow{ Tags: map[string]string{ "myfield1": "instance", diff --git a/plugins/inputs/stackdriver/stackdriver_test.go b/plugins/inputs/stackdriver/stackdriver_test.go index 6f4eb88486a5d..98be7a0246c07 100644 --- 
a/plugins/inputs/stackdriver/stackdriver_test.go +++ b/plugins/inputs/stackdriver/stackdriver_test.go @@ -748,7 +748,7 @@ func TestGather(t *testing.T) { err := s.Gather(&acc) require.NoError(t, err) - require.Equalf(t, len(acc.Errors) > 0, tt.wantAccErr, + require.Equalf(t, tt.wantAccErr, len(acc.Errors) > 0, "Accumulator errors. got=%v, want=%t", acc.Errors, tt.wantAccErr) actual := []telegraf.Metric{} diff --git a/plugins/inputs/statsd/statsd_test.go b/plugins/inputs/statsd/statsd_test.go index 8c405108e33dd..61744cbdf6168 100644 --- a/plugins/inputs/statsd/statsd_test.go +++ b/plugins/inputs/statsd/statsd_test.go @@ -1284,7 +1284,7 @@ func TestParse_MeasurementsWithMultipleValues(t *testing.T) { cachedtiming, ok := sSingle.timings["metric_type=timingvalid_multiple"] require.Truef(t, ok, "Expected cached measurement with hash 'metric_type=timingvalid_multiple' not found") - require.Equalf(t, cachedtiming.name, "valid_multiple", "Expected the name to be 'valid_multiple', got %s", cachedtiming.name) + require.Equalf(t, "valid_multiple", cachedtiming.name, "Expected the name to be 'valid_multiple', got %s", cachedtiming.name) // A 0 at samplerate 0.1 will add 10 values of 0, // A 0 with invalid samplerate will add a single 0, @@ -1617,12 +1617,12 @@ func TestParse_Counters_Delete(t *testing.T) { func TestParseKeyValue(t *testing.T) { k, v := parseKeyValue("foo=bar") - require.Equalf(t, k, "foo", "Expected %s, got %s", "foo", k) - require.Equalf(t, v, "bar", "Expected %s, got %s", "bar", v) + require.Equalf(t, "foo", k, "Expected %s, got %s", "foo", k) + require.Equalf(t, "bar", v, "Expected %s, got %s", "bar", v) k2, v2 := parseKeyValue("baz") - require.Equalf(t, k2, "", "Expected %s, got %s", "", k2) - require.Equalf(t, v2, "baz", "Expected %s, got %s", "baz", v2) + require.Equalf(t, "", k2, "Expected %s, got %s", "", k2) + require.Equalf(t, "baz", v2, "Expected %s, got %s", "baz", v2) } // Test utility functions @@ -1849,7 +1849,7 @@ func TestParse_Ints(t *testing.T) { acc := &testutil.Accumulator{} require.NoError(t, s.Gather(acc)) - require.Equal(t, s.Percentiles, []Number{90.0}) + require.Equal(t, []Number{90.0}, s.Percentiles) } func TestParse_KeyValue(t *testing.T) { diff --git a/plugins/inputs/unbound/unbound_test.go b/plugins/inputs/unbound/unbound_test.go index e9994d7ebe4d6..f1ec205934b04 100644 --- a/plugins/inputs/unbound/unbound_test.go +++ b/plugins/inputs/unbound/unbound_test.go @@ -27,7 +27,7 @@ func TestParseFullOutput(t *testing.T) { require.True(t, acc.HasMeasurement("unbound")) require.Len(t, acc.Metrics, 1) - require.Equal(t, acc.NFields(), 63) + require.Equal(t, 63, acc.NFields()) acc.AssertContainsFields(t, "unbound", parsedFullOutput) } @@ -46,7 +46,7 @@ func TestParseFullOutputThreadAsTag(t *testing.T) { require.True(t, acc.HasMeasurement("unbound_threads")) require.Len(t, acc.Metrics, 2) - require.Equal(t, acc.NFields(), 63) + require.Equal(t, 63, acc.NFields()) acc.AssertContainsFields(t, "unbound", parsedFullOutputThreadAsTagMeasurementUnbound) acc.AssertContainsFields(t, "unbound_threads", parsedFullOutputThreadAsTagMeasurementUnboundThreads) diff --git a/plugins/inputs/varnish/varnish_test.go b/plugins/inputs/varnish/varnish_test.go index 6a3b305134a5a..ca45e8c4c3094 100644 --- a/plugins/inputs/varnish/varnish_test.go +++ b/plugins/inputs/varnish/varnish_test.go @@ -558,7 +558,7 @@ func TestVersions(t *testing.T) { require.Len(t, acc.Metrics, c.size) for _, m := range acc.Metrics { require.NotEmpty(t, m.Fields) - require.Equal(t, m.Measurement, "varnish") + 
require.Equal(t, "varnish", m.Measurement) for field := range m.Fields { require.NotContains(t, field, "reload_") } @@ -627,11 +627,11 @@ func TestVarnishAdmJson(t *testing.T) { require.NoError(t, err) activeVcl, err := getActiveVCLJson(bytes.NewBuffer(admJSON)) require.NoError(t, err) - require.Equal(t, activeVcl, "boot-123") + require.Equal(t, "boot-123", activeVcl) admJSON, err = os.ReadFile("test_data/" + "varnishadm-reload.json") require.NoError(t, err) activeVcl, err = getActiveVCLJson(bytes.NewBuffer(admJSON)) require.NoError(t, err) - require.Equal(t, activeVcl, "reload_20210723_091821_2056185") + require.Equal(t, "reload_20210723_091821_2056185", activeVcl) } diff --git a/plugins/inputs/vault/vault_test.go b/plugins/inputs/vault/vault_test.go index b0d051c79d2c7..fde45c790bd5f 100644 --- a/plugins/inputs/vault/vault_test.go +++ b/plugins/inputs/vault/vault_test.go @@ -241,5 +241,5 @@ func TestIntegration(t *testing.T) { require.NoError(t, plugin.Gather(&acc)) actual := acc.GetTelegrafMetrics() - testutil.RequireMetricsStructureEqual(t, expected, actual, options...) + testutil.RequireMetricsStructureSubset(t, expected, actual, options...) } diff --git a/plugins/inputs/vsphere/vsphere_test.go b/plugins/inputs/vsphere/vsphere_test.go index a6064be88d338..b933942b9e6e1 100644 --- a/plugins/inputs/vsphere/vsphere_test.go +++ b/plugins/inputs/vsphere/vsphere_test.go @@ -476,7 +476,7 @@ func TestDisconnectedServerBehavior(t *testing.T) { v.DisconnectedServersBehavior = "something else" _, err = NewEndpoint(context.Background(), v, u, v.Log) require.Error(t, err) - require.Equal(t, err.Error(), `"something else" is not a valid value for disconnected_servers_behavior`) + require.Equal(t, `"something else" is not a valid value for disconnected_servers_behavior`, err.Error()) } func testCollection(t *testing.T, excludeClusters bool) { diff --git a/plugins/inputs/win_perf_counters/win_perf_counters_test.go b/plugins/inputs/win_perf_counters/win_perf_counters_test.go index 6ed499f0d41e7..40593475db690 100644 --- a/plugins/inputs/win_perf_counters/win_perf_counters_test.go +++ b/plugins/inputs/win_perf_counters/win_perf_counters_test.go @@ -316,7 +316,7 @@ func TestCounterPathParsing(t *testing.T) { for path, vals := range counterPathsAndRes { h, o, i, c, err := extractCounterInfoFromCounterPath(path) require.NoError(t, err) - require.Equalf(t, vals, []string{h, o, i, c}, "arrays: %#v and %#v are not equal", vals, []string{o, i, c}) + require.Equalf(t, []string{h, o, i, c}, vals, "arrays: %#v and %#v are not equal", vals, []string{o, i, c}) } for _, path := range invalidCounterPaths { _, _, _, _, err := extractCounterInfoFromCounterPath(path) diff --git a/plugins/inputs/wireless/wireless_test.go b/plugins/inputs/wireless/wireless_test.go index f16c80ea6f8ad..71904c56625c7 100644 --- a/plugins/inputs/wireless/wireless_test.go +++ b/plugins/inputs/wireless/wireless_test.go @@ -52,5 +52,5 @@ func TestLoadWirelessTable(t *testing.T) { require.NoError(t, err) as := require.New(t) - as.Equal(metrics, expectedMetrics) + as.Equal(expectedMetrics, metrics) } diff --git a/plugins/inputs/x509_cert/x509_cert_test.go b/plugins/inputs/x509_cert/x509_cert_test.go index a2ebab51bd69a..46d2aeea25f42 100644 --- a/plugins/inputs/x509_cert/x509_cert_test.go +++ b/plugins/inputs/x509_cert/x509_cert_test.go @@ -260,7 +260,7 @@ func TestGatherExcludeRootCerts(t *testing.T) { require.NoError(t, sc.Gather(&acc)) require.True(t, acc.HasMeasurement("x509_cert")) - require.Equal(t, acc.NMetrics(), uint64(1)) + 
require.Equal(t, uint64(1), acc.NMetrics()) } func TestGatherChain(t *testing.T) { diff --git a/plugins/outputs/cratedb/cratedb_test.go b/plugins/outputs/cratedb/cratedb_test.go index 20d84f3bd5f3e..d1fdf1688308d 100644 --- a/plugins/outputs/cratedb/cratedb_test.go +++ b/plugins/outputs/cratedb/cratedb_test.go @@ -168,14 +168,14 @@ func TestEscapeValue(t *testing.T) { for _, test := range tests { got, err := escapeValue(test.Value, "_") require.NoError(t, err, "value: %#v", test.Value) - require.Equal(t, got, test.Want) + require.Equal(t, test.Want, got) } } func TestCircumventingStringEscape(t *testing.T) { value, err := escapeObject(map[string]interface{}{"a.b": "c"}, `_"`) require.NoError(t, err) - require.Equal(t, value, `{"a_""b" = 'c'}`) + require.Equal(t, `{"a_""b" = 'c'}`, value) } func Test_hashID(t *testing.T) { diff --git a/plugins/outputs/elasticsearch/elasticsearch.go b/plugins/outputs/elasticsearch/elasticsearch.go index 0ace4816df86e..c639e46eb94b8 100644 --- a/plugins/outputs/elasticsearch/elasticsearch.go +++ b/plugins/outputs/elasticsearch/elasticsearch.go @@ -316,8 +316,9 @@ func (a *Elasticsearch) Write(metrics []telegraf.Metric) error { if res.Errors { for id, err := range res.Failed() { a.Log.Errorf( - "Elasticsearch indexing failure, id: %d, error: %s, caused by: %s, %s", + "Elasticsearch indexing failure, id: %d, status: %d, error: %s, caused by: %s, %s", id, + err.Status, err.Error.Reason, err.Error.CausedBy["reason"], err.Error.CausedBy["type"], diff --git a/plugins/outputs/graylog/graylog_test.go b/plugins/outputs/graylog/graylog_test.go index 802befa574f49..cdeb78a60c693 100644 --- a/plugins/outputs/graylog/graylog_test.go +++ b/plugins/outputs/graylog/graylog_test.go @@ -37,15 +37,15 @@ func TestSerializer(t *testing.T) { err = json.Unmarshal([]byte(r), &obj) require.NoError(t, err) - require.Equal(t, obj["version"], "1.1") - require.Equal(t, obj["_name"], "testing") - require.Equal(t, obj["_verb"], "GET") - require.Equal(t, obj["host"], "hostname") - require.Equal(t, obj["full_message"], "full") - require.Equal(t, obj["short_message"], "short") - require.Equal(t, obj["level"], "1") - require.Equal(t, obj["facility"], "demo") - require.Equal(t, obj["line"], "42") - require.Equal(t, obj["file"], "graylog.go") + require.Equal(t, "1.1", obj["version"]) + require.Equal(t, "testing", obj["_name"]) + require.Equal(t, "GET", obj["_verb"]) + require.Equal(t, "hostname", obj["host"]) + require.Equal(t, "full", obj["full_message"]) + require.Equal(t, "short", obj["short_message"]) + require.Equal(t, "1", obj["level"]) + require.Equal(t, "demo", obj["facility"]) + require.Equal(t, "42", obj["line"]) + require.Equal(t, "graylog.go", obj["file"]) } } diff --git a/plugins/outputs/graylog/graylog_test_linux.go b/plugins/outputs/graylog/graylog_test_linux.go index 563f81ac14e39..1a50086cffa6f 100644 --- a/plugins/outputs/graylog/graylog_test_linux.go +++ b/plugins/outputs/graylog/graylog_test_linux.go @@ -16,10 +16,11 @@ import ( "testing" "time" + "github.com/stretchr/testify/require" + "github.com/influxdata/telegraf/config" tlsint "github.com/influxdata/telegraf/plugins/common/tls" "github.com/influxdata/telegraf/testutil" - "github.com/stretchr/testify/require" ) func TestWriteUDP(t *testing.T) { @@ -170,14 +171,14 @@ func UDPServer(t *testing.T, wg *sync.WaitGroup, namefieldnoprefix bool) string if err != nil { return err } - require.Equal(t, obj["short_message"], "telegraf") + require.Equal(t, "telegraf", obj["short_message"]) if namefieldnoprefix { - require.Equal(t, 
obj["name"], "test1") + require.Equal(t, "test1", obj["name"]) } else { - require.Equal(t, obj["_name"], "test1") + require.Equal(t, "test1", obj["_name"]) } - require.Equal(t, obj["_tag1"], "value1") - require.Equal(t, obj["_value"], float64(1)) + require.Equal(t, "value1", obj["_tag1"]) + require.Equal(t, float64(1), obj["_value"]) return nil } @@ -247,10 +248,10 @@ func TCPServer(t *testing.T, wg *sync.WaitGroup, tlsConfig *tls.Config, errs cha var obj GelfObject err = json.Unmarshal(bufW.Bytes(), &obj) require.NoError(t, err) - require.Equal(t, obj["short_message"], "telegraf") - require.Equal(t, obj["_name"], "test1") - require.Equal(t, obj["_tag1"], "value1") - require.Equal(t, obj["_value"], float64(1)) + require.Equal(t, "telegraf", obj["short_message"]) + require.Equal(t, "test1", obj["_name"]) + require.Equal(t, "value1", obj["_tag1"]) + require.Equal(t, float64(1), obj["_value"]) return nil } diff --git a/plugins/outputs/influxdb/http_test.go b/plugins/outputs/influxdb/http_test.go index 0b9cfd399a862..316b007270f01 100644 --- a/plugins/outputs/influxdb/http_test.go +++ b/plugins/outputs/influxdb/http_test.go @@ -119,8 +119,8 @@ func TestHTTP_CreateDatabase(t *testing.T) { }, database: `a " b`, queryHandlerFunc: func(t *testing.T, w http.ResponseWriter, r *http.Request) { - require.Equal(t, r.Header.Get("A"), "B") - require.Equal(t, r.Header.Get("C"), "D") + require.Equal(t, "B", r.Header.Get("A")) + require.Equal(t, "D", r.Header.Get("C")) w.WriteHeader(http.StatusOK) _, err = w.Write(successResponse) require.NoError(t, err) @@ -137,8 +137,8 @@ func TestHTTP_CreateDatabase(t *testing.T) { Database: "telegraf", }, queryHandlerFunc: func(t *testing.T, w http.ResponseWriter, r *http.Request) { - require.Equal(t, r.Header.Get("A"), "B") - require.Equal(t, r.Header.Get("C"), "D") + require.Equal(t, "B", r.Header.Get("A")) + require.Equal(t, "D", r.Header.Get("C")) w.WriteHeader(http.StatusOK) _, err = w.Write(successResponse) require.NoError(t, err) @@ -291,7 +291,7 @@ func TestHTTP_Write(t *testing.T) { Log: testutil.Logger{}, }, queryHandlerFunc: func(t *testing.T, w http.ResponseWriter, r *http.Request) { - require.Equal(t, r.FormValue("db"), "telegraf") + require.Equal(t, "telegraf", r.FormValue("db")) body, err := io.ReadAll(r.Body) require.NoError(t, err) require.Contains(t, string(body), "cpu value=42") @@ -324,7 +324,7 @@ func TestHTTP_Write(t *testing.T) { Log: testutil.Logger{}, }, queryHandlerFunc: func(t *testing.T, w http.ResponseWriter, r *http.Request) { - require.Equal(t, r.Header.Get("User-Agent"), "telegraf") + require.Equal(t, "telegraf", r.Header.Get("User-Agent")) w.WriteHeader(http.StatusNoContent) }, }, @@ -362,8 +362,8 @@ func TestHTTP_Write(t *testing.T) { Log: testutil.Logger{}, }, queryHandlerFunc: func(t *testing.T, w http.ResponseWriter, r *http.Request) { - require.Equal(t, r.Header.Get("A"), "B") - require.Equal(t, r.Header.Get("C"), "D") + require.Equal(t, "B", r.Header.Get("A")) + require.Equal(t, "D", r.Header.Get("C")) w.WriteHeader(http.StatusNoContent) }, }, @@ -581,7 +581,7 @@ func TestHTTP_WriteContentEncodingGzip(t *testing.T) { http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { switch r.URL.Path { case "/write": - require.Equal(t, r.Header.Get("Content-Encoding"), "gzip") + require.Equal(t, "gzip", r.Header.Get("Content-Encoding")) gr, err := gzip.NewReader(r.Body) require.NoError(t, err) @@ -709,7 +709,7 @@ func TestHTTP_WriteDatabaseTagWorksOnRetry(t *testing.T) { case "/write": err := r.ParseForm() require.NoError(t, err) - 
require.Equal(t, r.Form["db"], []string{"foo"}) + require.Equal(t, []string{"foo"}, r.Form["db"]) body, err := io.ReadAll(r.Body) require.NoError(t, err) @@ -794,8 +794,8 @@ func TestDBRPTags(t *testing.T) { ), }, handlerFunc: func(t *testing.T, w http.ResponseWriter, r *http.Request) { - require.Equal(t, r.FormValue("db"), "telegraf") - require.Equal(t, r.FormValue("rp"), "") + require.Equal(t, "telegraf", r.FormValue("db")) + require.Equal(t, "", r.FormValue("rp")) w.WriteHeader(http.StatusNoContent) }, }, @@ -817,8 +817,8 @@ func TestDBRPTags(t *testing.T) { ), }, handlerFunc: func(t *testing.T, w http.ResponseWriter, r *http.Request) { - require.Equal(t, r.FormValue("db"), "telegraf") - require.Equal(t, r.FormValue("rp"), "foo") + require.Equal(t, "telegraf", r.FormValue("db")) + require.Equal(t, "foo", r.FormValue("rp")) w.WriteHeader(http.StatusNoContent) }, }, @@ -844,8 +844,8 @@ func TestDBRPTags(t *testing.T) { ), }, handlerFunc: func(t *testing.T, w http.ResponseWriter, r *http.Request) { - require.Equal(t, r.FormValue("db"), "telegraf") - require.Equal(t, r.FormValue("rp"), "foo") + require.Equal(t, "telegraf", r.FormValue("db")) + require.Equal(t, "foo", r.FormValue("rp")) body, err := io.ReadAll(r.Body) require.NoError(t, err) require.Contains(t, string(body), "cpu,rp=foo value=42") @@ -873,8 +873,8 @@ func TestDBRPTags(t *testing.T) { ), }, handlerFunc: func(t *testing.T, w http.ResponseWriter, r *http.Request) { - require.Equal(t, r.FormValue("db"), "telegraf") - require.Equal(t, r.FormValue("rp"), "foo") + require.Equal(t, "telegraf", r.FormValue("db")) + require.Equal(t, "foo", r.FormValue("rp")) w.WriteHeader(http.StatusNoContent) }, }, @@ -898,8 +898,8 @@ func TestDBRPTags(t *testing.T) { ), }, handlerFunc: func(t *testing.T, w http.ResponseWriter, r *http.Request) { - require.Equal(t, r.FormValue("db"), "telegraf") - require.Equal(t, r.FormValue("rp"), "") + require.Equal(t, "telegraf", r.FormValue("db")) + require.Equal(t, "", r.FormValue("rp")) w.WriteHeader(http.StatusNoContent) }, }, @@ -926,8 +926,8 @@ func TestDBRPTags(t *testing.T) { ), }, handlerFunc: func(t *testing.T, w http.ResponseWriter, r *http.Request) { - require.Equal(t, r.FormValue("db"), "telegraf") - require.Equal(t, r.FormValue("rp"), "foo") + require.Equal(t, "telegraf", r.FormValue("db")) + require.Equal(t, "foo", r.FormValue("rp")) body, err := io.ReadAll(r.Body) require.NoError(t, err) require.Contains(t, string(body), "cpu value=42") @@ -957,8 +957,8 @@ func TestDBRPTags(t *testing.T) { ), }, handlerFunc: func(t *testing.T, w http.ResponseWriter, r *http.Request) { - require.Equal(t, r.FormValue("db"), "telegraf") - require.Equal(t, r.FormValue("rp"), "foo") + require.Equal(t, "telegraf", r.FormValue("db")) + require.Equal(t, "foo", r.FormValue("rp")) body, err := io.ReadAll(r.Body) require.NoError(t, err) require.Contains(t, string(body), "cpu,rp=foo value=42") diff --git a/plugins/outputs/influxdb_v2/http_test.go b/plugins/outputs/influxdb_v2/http_test.go index 2985400e0d246..77dd23e80e652 100644 --- a/plugins/outputs/influxdb_v2/http_test.go +++ b/plugins/outputs/influxdb_v2/http_test.go @@ -71,7 +71,7 @@ func TestWrite(t *testing.T) { case "/api/v2/write": err := r.ParseForm() require.NoError(t, err) - require.Equal(t, r.Form["bucket"], []string{"foobar"}) + require.Equal(t, []string{"foobar"}, r.Form["bucket"]) body, err := io.ReadAll(r.Body) require.NoError(t, err) @@ -131,7 +131,7 @@ func TestWriteBucketTagWorksOnRetry(t *testing.T) { case "/api/v2/write": err := r.ParseForm() 
require.NoError(t, err) - require.Equal(t, r.Form["bucket"], []string{"foo"}) + require.Equal(t, []string{"foo"}, r.Form["bucket"]) body, err := io.ReadAll(r.Body) require.NoError(t, err) diff --git a/plugins/outputs/kafka/kafka_test.go b/plugins/outputs/kafka/kafka_test.go index a45f9a32c4391..57608811f0dde 100644 --- a/plugins/outputs/kafka/kafka_test.go +++ b/plugins/outputs/kafka/kafka_test.go @@ -1,12 +1,14 @@ package kafka import ( + "context" "testing" "time" "github.com/Shopify/sarama" "github.com/docker/go-connections/nat" "github.com/stretchr/testify/require" + "github.com/testcontainers/testcontainers-go" "github.com/testcontainers/testcontainers-go/wait" "github.com/influxdata/telegraf" @@ -25,10 +27,26 @@ func TestConnectAndWriteIntegration(t *testing.T) { t.Skip("Skipping integration test in short mode") } + ctx := context.Background() + t.Log("creating test network") + networkName := "telegraf-test-output-kafka-network" + network, err := testcontainers.GenericNetwork(ctx, testcontainers.GenericNetworkRequest{ + NetworkRequest: testcontainers.NetworkRequest{ + Name: networkName, + Attachable: true, + CheckDuplicate: true, + }, + }) + require.NoError(t, err) + defer func() { + require.NoError(t, network.Remove(ctx), "terminating network failed") + }() + // Start the container as broker AND controller container := testutil.Container{ Image: "bitnami/kafka", Hostname: "localhost", // required to be able to resolve the name + Networks: []string{networkName}, ExposedPorts: []string{"9092:9092", "9093:9093"}, Env: map[string]string{ "KAFKA_CFG_NODE_ID": "0", diff --git a/plugins/outputs/stackdriver/stackdriver_test.go b/plugins/outputs/stackdriver/stackdriver_test.go index 4b78d09c26df9..120d2338b35f0 100644 --- a/plugins/outputs/stackdriver/stackdriver_test.go +++ b/plugins/outputs/stackdriver/stackdriver_test.go @@ -118,8 +118,8 @@ func TestWrite(t *testing.T) { require.NoError(t, err) request := mockMetric.reqs[0].(*monitoringpb.CreateTimeSeriesRequest) - require.Equal(t, request.TimeSeries[0].Resource.Type, "global") - require.Equal(t, request.TimeSeries[0].Resource.Labels["project_id"], "projects/[PROJECT]") + require.Equal(t, "global", request.TimeSeries[0].Resource.Type) + require.Equal(t, "projects/[PROJECT]", request.TimeSeries[0].Resource.Labels["project_id"]) } func TestWriteResourceTypeAndLabels(t *testing.T) { @@ -150,9 +150,9 @@ func TestWriteResourceTypeAndLabels(t *testing.T) { require.NoError(t, err) request := mockMetric.reqs[0].(*monitoringpb.CreateTimeSeriesRequest) - require.Equal(t, request.TimeSeries[0].Resource.Type, "foo") - require.Equal(t, request.TimeSeries[0].Resource.Labels["project_id"], "projects/[PROJECT]") - require.Equal(t, request.TimeSeries[0].Resource.Labels["mylabel"], "myvalue") + require.Equal(t, "foo", request.TimeSeries[0].Resource.Type) + require.Equal(t, "projects/[PROJECT]", request.TimeSeries[0].Resource.Labels["project_id"]) + require.Equal(t, "myvalue", request.TimeSeries[0].Resource.Labels["mylabel"]) } func TestWriteTagsAsResourceLabels(t *testing.T) { diff --git a/plugins/outputs/sumologic/sumologic_test.go b/plugins/outputs/sumologic/sumologic_test.go index 52ff988cef39f..52c1c5a6495c9 100644 --- a/plugins/outputs/sumologic/sumologic_test.go +++ b/plugins/outputs/sumologic/sumologic_test.go @@ -321,7 +321,7 @@ func TestContentEncodingGzip(t *testing.T) { payload, err := io.ReadAll(body) require.NoError(t, err) - require.Equal(t, string(payload), "metric=cpu field=value 42 0\n") + require.Equal(t, "metric=cpu field=value 42 0\n", 
string(payload)) w.WriteHeader(http.StatusNoContent) }) diff --git a/plugins/outputs/timestream/README.md b/plugins/outputs/timestream/README.md index 99bcfbd7d73b5..e1dec8b932c77 100644 --- a/plugins/outputs/timestream/README.md +++ b/plugins/outputs/timestream/README.md @@ -123,6 +123,12 @@ See the [CONFIGURATION.md][CONFIGURATION.md] for more details. ## ``` +### Unsigned Integers + +Timestream does **not** support unsigned int64 values. uint64 values up to the +maximum signed int64 are written unchanged, while any larger value is capped +at the maximum int64 value. + ### Batching Timestream WriteInputRequest.CommonAttributes are used to efficiently write data diff --git a/plugins/outputs/timestream/timestream.go b/plugins/outputs/timestream/timestream.go index 0d6762fabd2b3..db19cdbd8a958 100644 --- a/plugins/outputs/timestream/timestream.go +++ b/plugins/outputs/timestream/timestream.go @@ -6,6 +6,7 @@ import ( _ "embed" "errors" "fmt" + "math" "reflect" "strconv" "sync" @@ -609,7 +610,11 @@ func convertValue(v interface{}) (value string, valueType types.MeasureValueType value = strconv.FormatUint(uint64(t), 10) case uint64: valueType = types.MeasureValueTypeBigint - value = strconv.FormatUint(t, 10) + if t <= uint64(math.MaxInt64) { + value = strconv.FormatUint(t, 10) + } else { + value = strconv.FormatUint(math.MaxInt64, 10) + } case float32: valueType = types.MeasureValueTypeDouble value = strconv.FormatFloat(float64(t), 'f', -1, 32) diff --git a/plugins/outputs/timestream/timestream_test.go b/plugins/outputs/timestream/timestream_test.go index cbe08ccfbc089..5e6df4168107a 100644 --- a/plugins/outputs/timestream/timestream_test.go +++ b/plugins/outputs/timestream/timestream_test.go @@ -3,6 +3,7 @@ package timestream import ( "context" "fmt" + "math" "reflect" "sort" "strconv" @@ -268,14 +269,14 @@ func TestWriteMultiMeasuresSingleTableMode(t *testing.T) { for _, r := range result { transformedRecords = append(transformedRecords, r.Records...)
// Assert that we use measure name from input - require.Equal(t, *r.Records[0].MeasureName, "multi_measure_name") + require.Equal(t, "multi_measure_name", *r.Records[0].MeasureName) } // Expected 101 records require.Len(t, transformedRecords, recordCount+1, "Expected 101 records after transforming") // validate write to TS err := plugin.Write(inputs) require.NoError(t, err, "Write to Timestream failed") - require.Equal(t, mockClient.WriteRecordsRequestCount, 2, "Expected 2 WriteRecords calls") + require.Equal(t, 2, mockClient.WriteRecordsRequestCount, "Expected 2 WriteRecords calls") } func TestWriteMultiMeasuresMultiTableMode(t *testing.T) { @@ -323,7 +324,7 @@ func TestWriteMultiMeasuresMultiTableMode(t *testing.T) { require.Len(t, result, 1, "Expected 1 WriteRecordsInput requests") // Assert that we use measure name from config - require.Equal(t, *result[0].Records[0].MeasureName, "config-multi-measure-name") + require.Equal(t, "config-multi-measure-name", *result[0].Records[0].MeasureName) var transformedRecords []types.Record for _, r := range result { @@ -341,7 +342,7 @@ func TestWriteMultiMeasuresMultiTableMode(t *testing.T) { // validate successful write to TS err = plugin.Write(inputs) require.NoError(t, err, "Write to Timestream failed") - require.Equal(t, mockClient.WriteRecordsRequestCount, 1, "Expected 1 WriteRecords call") + require.Equal(t, 1, mockClient.WriteRecordsRequestCount, "Expected 1 WriteRecords call") } func TestBuildMultiMeasuresInSingleAndMultiTableMode(t *testing.T) { @@ -381,6 +382,24 @@ func TestBuildMultiMeasuresInSingleAndMultiTableMode(t *testing.T) { time1, ) + input5 := testutil.MustMetric( + metricName1, + map[string]string{"tag5": "value5"}, + map[string]interface{}{ + "measureMaxUint64": uint64(math.MaxUint64), + }, + time1, + ) + + input6 := testutil.MustMetric( + metricName1, + map[string]string{"tag6": "value6"}, + map[string]interface{}{ + "measureSmallUint64": uint64(123456), + }, + time1, + ) + expectedResultMultiTable := buildExpectedMultiRecords("config-multi-measure-name", metricName1) plugin := Timestream{ @@ -396,7 +415,7 @@ func TestBuildMultiMeasuresInSingleAndMultiTableMode(t *testing.T) { require.NoError(t, err, "Invalid configuration") // validate multi-record generation with MappingModeMultiTable - result := plugin.TransformMetrics([]telegraf.Metric{input1, input2, input3, input4}) + result := plugin.TransformMetrics([]telegraf.Metric{input1, input2, input3, input4, input5, input6}) require.Len(t, result, 1, "Expected 1 WriteRecordsInput requests") require.EqualValues(t, result[0], expectedResultMultiTable) @@ -421,7 +440,7 @@ func TestBuildMultiMeasuresInSingleAndMultiTableMode(t *testing.T) { expectedResultSingleTable := buildExpectedMultiRecords(metricName1, "singleTableName") // validate multi-record generation with MappingModeSingleTable - result = plugin.TransformMetrics([]telegraf.Metric{input1, input2, input3, input4}) + result = plugin.TransformMetrics([]telegraf.Metric{input1, input2, input3, input4, input5, input6}) require.Len(t, result, 1, "Expected 1 WriteRecordsInput requests") require.EqualValues(t, result[0], expectedResultSingleTable) @@ -473,6 +492,28 @@ func buildExpectedMultiRecords(multiMeasureName string, tableName string) *times recordsMultiTableMode = append(recordsMultiTableMode, recordBool...) 
+ recordMaxUint64 := buildMultiRecords([]SimpleInput{ + { + t: time1Epoch, + tableName: metricName1, + dimensions: map[string]string{"tag5": "value5"}, + measureValues: map[string]string{"measureMaxUint64": "9223372036854775807"}, + }, + }, multiMeasureName, types.MeasureValueTypeBigint) + + recordsMultiTableMode = append(recordsMultiTableMode, recordMaxUint64...) + + recordUint64 := buildMultiRecords([]SimpleInput{ + { + t: time1Epoch, + tableName: metricName1, + dimensions: map[string]string{"tag6": "value6"}, + measureValues: map[string]string{"measureSmallUint64": "123456"}, + }, + }, multiMeasureName, types.MeasureValueTypeBigint) + + recordsMultiTableMode = append(recordsMultiTableMode, recordUint64...) + expectedResultMultiTable := ×treamwrite.WriteRecordsInput{ DatabaseName: aws.String(tsDbName), TableName: aws.String(tableName), @@ -597,7 +638,7 @@ func TestWriteWhenRequestsGreaterThanMaxWriteGoRoutinesCount(t *testing.T) { err := plugin.Write(inputs) require.NoError(t, err, "Expected to write without any errors ") - require.Equal(t, mockClient.WriteRecordsRequestCount, maxWriteRecordsCalls, "Expected 5 calls to WriteRecords") + require.Equal(t, maxWriteRecordsCalls, mockClient.WriteRecordsRequestCount, "Expected 5 calls to WriteRecords") } func TestWriteWhenRequestsLesserThanMaxWriteGoRoutinesCount(t *testing.T) { @@ -636,7 +677,7 @@ func TestWriteWhenRequestsLesserThanMaxWriteGoRoutinesCount(t *testing.T) { err := plugin.Write(inputs) require.NoError(t, err, "Expected to write without any errors ") - require.Equal(t, mockClient.WriteRecordsRequestCount, maxWriteRecordsCalls, "Expected 5 calls to WriteRecords") + require.Equal(t, maxWriteRecordsCalls, mockClient.WriteRecordsRequestCount, "Expected 5 calls to WriteRecords") } func TestTransformMetricsSkipEmptyMetric(t *testing.T) { diff --git a/plugins/parsers/collectd/parser_test.go b/plugins/parsers/collectd/parser_test.go index e9108bac76acd..033a64c621165 100644 --- a/plugins/parsers/collectd/parser_test.go +++ b/plugins/parsers/collectd/parser_test.go @@ -112,7 +112,7 @@ func TestNewCollectdParser(t *testing.T) { ParseMultiValue: "join", } require.NoError(t, parser.Init()) - require.Equal(t, parser.popts.SecurityLevel, network.None) + require.Equal(t, network.None, parser.popts.SecurityLevel) require.NotNil(t, parser.popts.PasswordLookup) require.Nil(t, parser.popts.TypesDB) } diff --git a/plugins/parsers/csv/parser_test.go b/plugins/parsers/csv/parser_test.go index b26d46470950e..99582ee06b091 100644 --- a/plugins/parsers/csv/parser_test.go +++ b/plugins/parsers/csv/parser_test.go @@ -105,8 +105,8 @@ func TestTimestamp(t *testing.T) { metrics, err := p.Parse([]byte(testCSV)) require.NoError(t, err) - require.Equal(t, metrics[0].Time().UnixNano(), int64(1243094706000000000)) - require.Equal(t, metrics[1].Time().UnixNano(), int64(1257609906000000000)) + require.Equal(t, int64(1243094706000000000), metrics[0].Time().UnixNano()) + require.Equal(t, int64(1257609906000000000), metrics[1].Time().UnixNano()) } func TestTimestampYYYYMMDDHHmm(t *testing.T) { @@ -127,8 +127,8 @@ func TestTimestampYYYYMMDDHHmm(t *testing.T) { metrics, err := p.Parse([]byte(testCSV)) require.NoError(t, err) - require.Equal(t, metrics[0].Time().UnixNano(), int64(1243094700000000000)) - require.Equal(t, metrics[1].Time().UnixNano(), int64(1247328300000000000)) + require.Equal(t, int64(1243094700000000000), metrics[0].Time().UnixNano()) + require.Equal(t, int64(1247328300000000000), metrics[1].Time().UnixNano()) } func TestTimestampError(t *testing.T) { p 
:= &Parser{ @@ -163,8 +163,8 @@ func TestTimestampUnixFormat(t *testing.T) { 1257609906,80,test_name2` metrics, err := p.Parse([]byte(testCSV)) require.NoError(t, err) - require.Equal(t, metrics[0].Time().UnixNano(), int64(1243094706000000000)) - require.Equal(t, metrics[1].Time().UnixNano(), int64(1257609906000000000)) + require.Equal(t, int64(1243094706000000000), metrics[0].Time().UnixNano()) + require.Equal(t, int64(1257609906000000000), metrics[1].Time().UnixNano()) } func TestTimestampUnixMSFormat(t *testing.T) { @@ -183,8 +183,8 @@ func TestTimestampUnixMSFormat(t *testing.T) { 1257609906123,80,test_name2` metrics, err := p.Parse([]byte(testCSV)) require.NoError(t, err) - require.Equal(t, metrics[0].Time().UnixNano(), int64(1243094706123000000)) - require.Equal(t, metrics[1].Time().UnixNano(), int64(1257609906123000000)) + require.Equal(t, int64(1243094706123000000), metrics[0].Time().UnixNano()) + require.Equal(t, int64(1257609906123000000), metrics[1].Time().UnixNano()) } func TestQuotedCharacter(t *testing.T) { @@ -673,8 +673,8 @@ func TestTimestampTimezone(t *testing.T) { metrics, err := p.Parse([]byte(testCSV)) require.NoError(t, err) - require.Equal(t, metrics[0].Time().UnixNano(), int64(1243094706000000000)) - require.Equal(t, metrics[1].Time().UnixNano(), int64(1257609906000000000)) + require.Equal(t, int64(1243094706000000000), metrics[0].Time().UnixNano()) + require.Equal(t, int64(1257609906000000000), metrics[1].Time().UnixNano()) } func TestEmptyMeasurementName(t *testing.T) { @@ -860,8 +860,8 @@ func TestParseMetadataSeparators(t *testing.T) { } err = p.Init() require.Error(t, err) - require.Equal(t, err.Error(), "initializing separators failed: "+ - "csv_metadata_separators required when specifying csv_metadata_rows") + require.Equal(t, "initializing separators failed: "+ + "csv_metadata_separators required when specifying csv_metadata_rows", err.Error()) p = &Parser{ ColumnNames: []string{"a", "b"}, MetadataRows: 1, @@ -871,7 +871,7 @@ func TestParseMetadataSeparators(t *testing.T) { require.NoError(t, err) require.Len(t, p.metadataSeparatorList, 4) require.Empty(t, p.MetadataTrimSet) - require.Equal(t, p.metadataSeparatorList, metadataPattern{":=", ",", "=", ":"}) + require.Equal(t, metadataPattern{":=", ",", "=", ":"}, p.metadataSeparatorList) p = &Parser{ ColumnNames: []string{"a", "b"}, MetadataRows: 1, @@ -882,7 +882,7 @@ func TestParseMetadataSeparators(t *testing.T) { require.NoError(t, err) require.Len(t, p.metadataSeparatorList, 4) require.Len(t, p.MetadataTrimSet, 3) - require.Equal(t, p.metadataSeparatorList, metadataPattern{":=", ",", ":", "="}) + require.Equal(t, metadataPattern{":=", ",", ":", "="}, p.metadataSeparatorList) } func TestParseMetadataRow(t *testing.T) { @@ -897,13 +897,13 @@ func TestParseMetadataRow(t *testing.T) { m := p.parseMetadataRow("# this is a not matching string") require.Nil(t, m) m = p.parseMetadataRow("# key1 : value1 \r\n") - require.Equal(t, m, map[string]string{"# key1 ": " value1 "}) + require.Equal(t, map[string]string{"# key1 ": " value1 "}, m) m = p.parseMetadataRow("key2=1234\n") - require.Equal(t, m, map[string]string{"key2": "1234"}) + require.Equal(t, map[string]string{"key2": "1234"}, m) m = p.parseMetadataRow(" file created : 2021-10-08T12:34:18+10:00 \r\n") - require.Equal(t, m, map[string]string{" file created ": " 2021-10-08T12:34:18+10:00 "}) + require.Equal(t, map[string]string{" file created ": " 2021-10-08T12:34:18+10:00 "}, m) m = p.parseMetadataRow("file created: 2021-10-08T12:34:18\t\r\r\n") - 
require.Equal(t, m, map[string]string{"file created": " 2021-10-08T12:34:18\t"}) + require.Equal(t, map[string]string{"file created": " 2021-10-08T12:34:18\t"}, m) p = &Parser{ ColumnNames: []string{"a", "b"}, MetadataRows: 5, @@ -916,13 +916,13 @@ func TestParseMetadataRow(t *testing.T) { m = p.parseMetadataRow("# this is a not matching string") require.Nil(t, m) m = p.parseMetadataRow("# key1 : value1 \r\n") - require.Equal(t, m, map[string]string{"key1": "value1"}) + require.Equal(t, map[string]string{"key1": "value1"}, m) m = p.parseMetadataRow("key2=1234\n") - require.Equal(t, m, map[string]string{"key2": "1234"}) + require.Equal(t, map[string]string{"key2": "1234"}, m) m = p.parseMetadataRow(" file created : 2021-10-08T12:34:18+10:00 \r\n") - require.Equal(t, m, map[string]string{"file created": "2021-10-08T12:34:18+10:00"}) + require.Equal(t, map[string]string{"file created": "2021-10-08T12:34:18+10:00"}, m) m = p.parseMetadataRow("file created: '2021-10-08T12:34:18'\r\n") - require.Equal(t, m, map[string]string{"file created": "2021-10-08T12:34:18"}) + require.Equal(t, map[string]string{"file created": "2021-10-08T12:34:18"}, m) } func TestParseCSVFileWithMetadata(t *testing.T) { diff --git a/plugins/parsers/grok/parser_test.go b/plugins/parsers/grok/parser_test.go index 06bdf2bcc18cd..785ecca24e7f4 100644 --- a/plugins/parsers/grok/parser_test.go +++ b/plugins/parsers/grok/parser_test.go @@ -1089,7 +1089,7 @@ func TestDynamicMeasurementModifier(t *testing.T) { require.NoError(t, p.Compile()) m, err := p.ParseLine("4 5 hello") require.NoError(t, err) - require.Equal(t, m.Name(), "hello") + require.Equal(t, "hello", m.Name()) } func TestStaticMeasurementModifier(t *testing.T) { @@ -1114,7 +1114,7 @@ func TestTwoMeasurementModifier(t *testing.T) { require.NoError(t, p.Compile()) m, err := p.ParseLine("4 5 hello") require.NoError(t, err) - require.Equal(t, m.Name(), "4 5 hello") + require.Equal(t, "4 5 hello", m.Name()) } func TestMeasurementModifierNoName(t *testing.T) { @@ -1126,7 +1126,7 @@ func TestMeasurementModifierNoName(t *testing.T) { require.NoError(t, p.Compile()) m, err := p.ParseLine("4 5 hello") require.NoError(t, err) - require.Equal(t, m.Name(), "hello") + require.Equal(t, "hello", m.Name()) } func TestEmptyYearInTimestamp(t *testing.T) { diff --git a/plugins/parsers/json/parser_test.go b/plugins/parsers/json/parser_test.go index 5ea82fa2ac906..affd4d9e10823 100644 --- a/plugins/parsers/json/parser_test.go +++ b/plugins/parsers/json/parser_test.go @@ -672,7 +672,7 @@ func TestUseCaseJSONQuery(t *testing.T) { actual, err := parser.Parse([]byte(testString)) require.NoError(t, err) require.Len(t, actual, 3) - require.Equal(t, actual[0].Fields()["last"], "Murphy") + require.Equal(t, "Murphy", actual[0].Fields()["last"]) } func TestTimeParser(t *testing.T) { diff --git a/plugins/parsers/xpath/parser_test.go b/plugins/parsers/xpath/parser_test.go index e8ba234ae3a39..a8359cad4b030 100644 --- a/plugins/parsers/xpath/parser_test.go +++ b/plugins/parsers/xpath/parser_test.go @@ -1184,7 +1184,7 @@ func TestEmptySelection(t *testing.T) { _, err := parser.Parse([]byte(tt.input)) require.Error(t, err) - require.Equal(t, err.Error(), "cannot parse with empty selection node") + require.Equal(t, "cannot parse with empty selection node", err.Error()) }) } } diff --git a/plugins/processors/ifname/ttl_cache_test.go b/plugins/processors/ifname/ttl_cache_test.go index 5e6cae32e0d3f..6f16756d8d4cb 100644 --- a/plugins/processors/ifname/ttl_cache_test.go +++ 
b/plugins/processors/ifname/ttl_cache_test.go @@ -24,7 +24,7 @@ func TestTTLCacheExpire(t *testing.T) { _, ok, _ := c.Get("ones") require.False(t, ok) require.Empty(t, c.lru.m) - require.Equal(t, c.lru.l.Len(), 0) + require.Equal(t, 0, c.lru.l.Len()) } func TestTTLCache(t *testing.T) { diff --git a/plugins/serializers/graphite/graphite_test.go b/plugins/serializers/graphite/graphite_test.go index 3338eff61edbf..2882587ca2b22 100644 --- a/plugins/serializers/graphite/graphite_test.go +++ b/plugins/serializers/graphite/graphite_test.go @@ -209,7 +209,7 @@ func TestSerializeMetricHostWithMultipleTemplatesWithDefault(t *testing.T) { }, } require.NoError(t, s.Init()) - require.Equal(t, s.Template, "tags.host.measurement.field") + require.Equal(t, "tags.host.measurement.field", s.Template) buf, err := s.Serialize(m1) require.NoError(t, err) diff --git a/plugins/serializers/msgpack/metric_test.go b/plugins/serializers/msgpack/metric_test.go index e85fe4a020feb..db084470434cd 100644 --- a/plugins/serializers/msgpack/metric_test.go +++ b/plugins/serializers/msgpack/metric_test.go @@ -15,7 +15,7 @@ func TestMsgPackTime32(t *testing.T) { var nsec int64 t1 := MessagePackTime{time: time.Unix(sec, nsec)} - require.Equal(t, t1.Len(), 4) + require.Equal(t, 4, t1.Len()) buf := make([]byte, t1.Len()) require.NoError(t, t1.MarshalBinaryTo(buf)) @@ -33,7 +33,7 @@ func TestMsgPackTime64(t *testing.T) { var nsec int64 = 999999999 t1 := MessagePackTime{time: time.Unix(sec, nsec)} - require.Equal(t, t1.Len(), 8) + require.Equal(t, 8, t1.Len()) buf := make([]byte, t1.Len()) require.NoError(t, t1.MarshalBinaryTo(buf)) @@ -51,7 +51,7 @@ func TestMsgPackTime96(t *testing.T) { var nsec int64 = 111111111 t1 := MessagePackTime{time: time.Unix(sec, nsec)} - require.Equal(t, t1.Len(), 12) + require.Equal(t, 12, t1.Len()) buf := make([]byte, t1.Len()) require.NoError(t, t1.MarshalBinaryTo(buf)) @@ -65,7 +65,7 @@ func TestMsgPackTime96(t *testing.T) { // Testing the default value: 0001-01-01T00:00:00Z t1 = MessagePackTime{} - require.Equal(t, t1.Len(), 12) + require.Equal(t, 12, t1.Len()) require.NoError(t, t1.MarshalBinaryTo(buf)) t2 = new(MessagePackTime) diff --git a/plugins/serializers/template/template_test.go b/plugins/serializers/template/template_test.go index 5fea65d3c3a5f..1ee3bd6b655b7 100644 --- a/plugins/serializers/template/template_test.go +++ b/plugins/serializers/template/template_test.go @@ -170,13 +170,12 @@ func TestSerializeBatch(t *testing.T) { require.NoError(t, err) require.Equal( t, - string(buf), `0: cpu 42 1: cpu 42 -`, +`, string(buf), ) // A batch template should still work when serializing a single metric singleBuf, err := s.Serialize(m) require.NoError(t, err) - require.Equal(t, string(singleBuf), "0: cpu 42\n") + require.Equal(t, "0: cpu 42\n", string(singleBuf)) } diff --git a/testutil/metric.go b/testutil/metric.go index cb59eaa9e626d..123f8f84afc42 100644 --- a/testutil/metric.go +++ b/testutil/metric.go @@ -244,6 +244,49 @@ func RequireMetricsEqual(t testing.TB, expected, actual []telegraf.Metric, opts } } +// RequireMetricsSubset halts the test with an error if the expected array +// of metrics is not a subset of the actual metrics. 
+func RequireMetricsSubset(t testing.TB, expected, actual []telegraf.Metric, opts ...cmp.Option) { + if x, ok := t.(helper); ok { + x.Helper() + } + + lhs := make([]*metricDiff, 0, len(expected)) + for _, m := range expected { + lhs = append(lhs, newMetricDiff(m)) + } + rhs := make([]*metricDiff, 0, len(actual)) + for _, m := range actual { + rhs = append(rhs, newMetricDiff(m)) + } + + // Sort the metrics + sort.SliceStable(lhs, func(i, j int) bool { + return lessFunc(lhs[i], lhs[j]) + }) + sort.SliceStable(rhs, func(i, j int) bool { + return lessFunc(rhs[i], rhs[j]) + }) + + // Filter the right-hand-side (aka actual) by being contained in the + // left-hand-side (aka expected). + rhsFiltered := make([]*metricDiff, 0, len(rhs)) + for _, r := range rhs { + // Find the next element in the sorted list that might match + for _, l := range lhs { + if cmp.Equal(l, r, opts...) { + rhsFiltered = append(rhsFiltered, r) + break + } + } + } + + opts = append(opts, cmpopts.EquateNaNs()) + if diff := cmp.Diff(lhs, rhsFiltered, opts...); diff != "" { + t.Fatalf("[]telegraf.Metric\n--- expected\n+++ actual\n%s", diff) + } +} + // RequireMetricsStructureEqual halts the test with an error if the array of // metrics is structural different. Structure means that the metric differs // in either name, tag key/values, time (if not ignored) or fields. For fields @@ -268,6 +311,51 @@ func RequireMetricsStructureEqual(t testing.TB, expected, actual []telegraf.Metr } } +// RequireMetricsStructureSubset halts the test with an error if the expected +// array of metrics is not a subset of the actual metrics. The equality here +// is only based on the structure (i.e. key name and value types) and NOT on +// the actual value. +func RequireMetricsStructureSubset(t testing.TB, expected, actual []telegraf.Metric, opts ...cmp.Option) { + if x, ok := t.(helper); ok { + x.Helper() + } + + lhs := make([]*metricDiff, 0, len(expected)) + for _, m := range expected { + lhs = append(lhs, newMetricStructureDiff(m)) + } + rhs := make([]*metricDiff, 0, len(actual)) + for _, m := range actual { + rhs = append(rhs, newMetricStructureDiff(m)) + } + + // Sort the metrics + sort.SliceStable(lhs, func(i, j int) bool { + return lessFunc(lhs[i], lhs[j]) + }) + sort.SliceStable(rhs, func(i, j int) bool { + return lessFunc(rhs[i], rhs[j]) + }) + + // Filter the right-hand-side (aka actual) by being contained in the + // left-hand-side (aka expected). + rhsFiltered := make([]*metricDiff, 0, len(rhs)) + for _, r := range rhs { + // Find the next element in the sorted list that might match + for _, l := range lhs { + if cmp.Equal(l, r, opts...) { + rhsFiltered = append(rhsFiltered, r) + break + } + } + } + + opts = append(opts, cmpopts.EquateNaNs()) + if diff := cmp.Diff(lhs, rhsFiltered, opts...); diff != "" { + t.Fatalf("[]telegraf.Metric\n--- expected\n+++ actual\n%s", diff) + } +} + // MustMetric creates a new metric. 
func MustMetric( name string, diff --git a/testutil/metric_test.go b/testutil/metric_test.go index e84fc569ed638..c0cb458011788 100644 --- a/testutil/metric_test.go +++ b/testutil/metric_test.go @@ -104,3 +104,156 @@ func TestRequireMetricsEqual(t *testing.T) { }) } } + +func TestRequireMetricsSubset(t *testing.T) { + tests := []struct { + name string + got []telegraf.Metric + want []telegraf.Metric + opts []cmp.Option + }{ + { + name: "subset of metrics", + got: []telegraf.Metric{ + MustMetric( + "cpu", + map[string]string{}, + map[string]interface{}{"value": float64(3.14)}, + time.Unix(0, 0), + ), + MustMetric( + "net", + map[string]string{}, + map[string]interface{}{"value": int64(42)}, + time.Unix(0, 0), + ), + MustMetric( + "superfluous", + map[string]string{}, + map[string]interface{}{"value": true}, + time.Unix(0, 0), + ), + }, + want: []telegraf.Metric{ + MustMetric( + "net", + map[string]string{}, + map[string]interface{}{"value": int64(42)}, + time.Unix(0, 0), + ), + MustMetric( + "cpu", + map[string]string{}, + map[string]interface{}{"value": float64(3.14)}, + time.Unix(0, 0), + ), + }, + opts: []cmp.Option{SortMetrics()}, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + RequireMetricsSubset(t, tt.want, tt.got, tt.opts...) + }) + } +} + +func TestRequireMetricsStructureEqual(t *testing.T) { + tests := []struct { + name string + got []telegraf.Metric + want []telegraf.Metric + opts []cmp.Option + }{ + { + name: "compare structure", + got: []telegraf.Metric{ + MustMetric( + "cpu", + map[string]string{}, + map[string]interface{}{"value": float64(3.14)}, + time.Unix(0, 0), + ), + MustMetric( + "net", + map[string]string{}, + map[string]interface{}{"value": int64(42)}, + time.Unix(0, 0), + ), + }, + want: []telegraf.Metric{ + MustMetric( + "net", + map[string]string{}, + map[string]interface{}{"value": int64(0)}, + time.Unix(0, 0), + ), + MustMetric( + "cpu", + map[string]string{}, + map[string]interface{}{"value": float64(0)}, + time.Unix(0, 0), + ), + }, + opts: []cmp.Option{SortMetrics()}, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + RequireMetricsStructureEqual(t, tt.want, tt.got, tt.opts...) + }) + } +} + +func TestRequireMetricsStructureSubset(t *testing.T) { + tests := []struct { + name string + got []telegraf.Metric + want []telegraf.Metric + opts []cmp.Option + }{ + { + name: "subset of metric structure", + got: []telegraf.Metric{ + MustMetric( + "cpu", + map[string]string{}, + map[string]interface{}{"value": float64(3.14)}, + time.Unix(0, 0), + ), + MustMetric( + "net", + map[string]string{}, + map[string]interface{}{"value": int64(42)}, + time.Unix(0, 0), + ), + MustMetric( + "superfluous", + map[string]string{}, + map[string]interface{}{"value": true}, + time.Unix(0, 0), + ), + }, + want: []telegraf.Metric{ + MustMetric( + "net", + map[string]string{}, + map[string]interface{}{"value": int64(0)}, + time.Unix(0, 0), + ), + MustMetric( + "cpu", + map[string]string{}, + map[string]interface{}{"value": float64(0)}, + time.Unix(0, 0), + ), + }, + opts: []cmp.Option{SortMetrics()}, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + RequireMetricsStructureSubset(t, tt.want, tt.got, tt.opts...) + }) + } +}
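The uint64 capping described in the Timestream README hunk above can be seen in isolation with a minimal sketch using only the standard library. The helper name `capUint64` is purely illustrative and does not exist in the plugin, which performs the same check inline in `convertValue`.

```go
package main

import (
	"fmt"
	"math"
	"strconv"
)

// capUint64 mirrors the check added to convertValue: a uint64 that fits into a
// signed int64 is formatted unchanged, while anything larger is clamped to
// math.MaxInt64 so it remains representable as a Timestream BIGINT.
func capUint64(v uint64) string {
	if v <= uint64(math.MaxInt64) {
		return strconv.FormatUint(v, 10)
	}
	return strconv.FormatUint(math.MaxInt64, 10)
}

func main() {
	fmt.Println(capUint64(123456))         // 123456
	fmt.Println(capUint64(math.MaxUint64)) // 9223372036854775807 (capped)
}
```

The second printed value matches the `measureMaxUint64` expectation (`"9223372036854775807"`) added to `timestream_test.go` above.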
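The new `testutil.RequireMetricsSubset` helper is exercised by `TestRequireMetricsSubset` above; as a rough usage sketch, a plugin test could assert that the expected metrics appear in the gathered output while tolerating extras. The test name and metrics below are hypothetical, assuming the helper is called from outside the `testutil` package.

```go
package example_test

import (
	"testing"
	"time"

	"github.com/influxdata/telegraf"
	"github.com/influxdata/telegraf/testutil"
)

// TestContainsExpectedMetrics is a hypothetical example: the gathered output
// ("actual") may contain additional metrics, and the assertion only requires
// that every expected metric is present with matching values.
func TestContainsExpectedMetrics(t *testing.T) {
	actual := []telegraf.Metric{
		testutil.MustMetric("cpu",
			map[string]string{},
			map[string]interface{}{"value": float64(3.14)},
			time.Unix(0, 0),
		),
		testutil.MustMetric("superfluous",
			map[string]string{},
			map[string]interface{}{"value": true},
			time.Unix(0, 0),
		),
	}
	expected := []telegraf.Metric{
		testutil.MustMetric("cpu",
			map[string]string{},
			map[string]interface{}{"value": float64(3.14)},
			time.Unix(0, 0),
		),
	}

	// Extra metrics in "actual" are ignored; a missing expected metric fails.
	testutil.RequireMetricsSubset(t, expected, actual, testutil.SortMetrics())
}
```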