Skip to content

Commit

Permalink
adapt to the latest mdatagen changes
Browse files Browse the repository at this point in the history
  • Loading branch information
dmitryax committed Jul 31, 2023
1 parent a08e236 commit afd4aa8
Show file tree
Hide file tree
Showing 10 changed files with 148 additions and 17 deletions.
1 change: 1 addition & 0 deletions internal/receiver/databricksreceiver/factory.go
Original file line number Diff line number Diff line change
Expand Up @@ -58,6 +58,7 @@ func newReceiverFactory() receiver.CreateMetricsFunc {
rmp: databricks.NewRunMetricsProvider(dbrsvc),
dbrmp: databricks.MetricsProvider{Svc: dbrsvc},
metricsBuilder: metadata.NewMetricsBuilder(dbrcfg.MetricsBuilderConfig, settings),
resourceBuilder: metadata.NewResourceBuilder(dbrcfg.MetricsBuilderConfig.ResourceAttributes),
scmb: spark.ClusterMetricsBuilder{Ssvc: ssvc},
semb: spark.ExtraMetricsBuilder{
Ssvc: ssvc,
Expand Down

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@ import (
"go.opentelemetry.io/collector/pdata/pmetric"

"github.com/signalfx/splunk-otel-collector/internal/receiver/databricksreceiver/internal/commontest"
"github.com/signalfx/splunk-otel-collector/internal/receiver/databricksreceiver/internal/metadata"
)

func TestStripSparkMetricKey(t *testing.T) {
Expand All @@ -47,7 +48,8 @@ func TestClusterMetricsBuilder_GeneratedMetrics(t *testing.T) {
const expectedCount = 112

testBuilder := commontest.NewTestMetricsBuilder()
built := coreMetrics.Build(testBuilder, pcommon.NewTimestampFromTime(time.Now()))
rb := metadata.NewResourceBuilder(metadata.DefaultResourceAttributesConfig())
built := coreMetrics.Build(testBuilder, rb, pcommon.NewTimestampFromTime(time.Now()), "my-app-id")
pm := pmetric.NewMetrics()
for _, metric := range built {
metric.ResourceMetrics().MoveAndAppendTo(pm.ResourceMetrics())
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@ import (
"go.uber.org/zap"

"github.com/signalfx/splunk-otel-collector/internal/receiver/databricksreceiver/internal/commontest"
"github.com/signalfx/splunk-otel-collector/internal/receiver/databricksreceiver/internal/metadata"
)

func TestSparkExtraMetricsBuilder_Executors(t *testing.T) {
Expand All @@ -32,7 +33,8 @@ func TestSparkExtraMetricsBuilder_Executors(t *testing.T) {
require.NoError(t, err)

builder := commontest.NewTestMetricsBuilder()
built := execMetrics.Build(builder, pcommon.NewTimestampFromTime(time.Now()))
rb := metadata.NewResourceBuilder(metadata.DefaultResourceAttributesConfig())
built := execMetrics.Build(builder, rb, pcommon.NewTimestampFromTime(time.Now()), "my-app-id")
pm := pmetric.NewMetrics()
for _, metrics := range built {
metrics.ResourceMetrics().MoveAndAppendTo(pm.ResourceMetrics())
Expand All @@ -53,7 +55,8 @@ func TestSparkExtraMetricsBuilder_Jobs(t *testing.T) {
require.NoError(t, err)

builder := commontest.NewTestMetricsBuilder()
built := jobMetrics.Build(builder, pcommon.NewTimestampFromTime(time.Now()))
rb := metadata.NewResourceBuilder(metadata.DefaultResourceAttributesConfig())
built := jobMetrics.Build(builder, rb, pcommon.NewTimestampFromTime(time.Now()), "my-app-id")
pm := pmetric.NewMetrics()
for _, metrics := range built {
metrics.ResourceMetrics().MoveAndAppendTo(pm.ResourceMetrics())
Expand All @@ -77,7 +80,8 @@ func TestSparkExtraMetricsBuilder_Stages(t *testing.T) {
require.NoError(t, err)

builder := commontest.NewTestMetricsBuilder()
built := stageMetrics.Build(builder, pcommon.NewTimestampFromTime(time.Now()))
rb := metadata.NewResourceBuilder(metadata.DefaultResourceAttributesConfig())
built := stageMetrics.Build(builder, rb, pcommon.NewTimestampFromTime(time.Now()), "my-app-id")
pm := pmetric.NewMetrics()
for _, metrics := range built {
metrics.ResourceMetrics().MoveAndAppendTo(pm.ResourceMetrics())
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -124,15 +124,16 @@ func (m *ResourceMetrics) addStageInfo(clstr Cluster, appID string, info StageIn
})
}

// NOTE(review): this span is a unified-diff fragment from a commit page — the
// removed (pre-change) and added (post-change) lines are interleaved below, so
// it is not valid Go as-is. Comments mark which version each line belongs to.
//
// Build emits one pmetric.Metrics per tracked resource spec: it builds every
// pending metric into the metrics builder, stamps the resource attributes
// (Databricks instance name, Spark cluster ID and name — taken from
// rs.cluster), and collects each Emit result into the returned slice.
// removed: old signature threaded resource attributes via ResourceMetricsOption varargs.
func (m *ResourceMetrics) Build(builder *metadata.MetricsBuilder, now pcommon.Timestamp, rmo ...metadata.ResourceMetricsOption) []pmetric.Metrics {
// added: new signature takes the generated ResourceBuilder plus the instance name directly.
func (m *ResourceMetrics) Build(mb *metadata.MetricsBuilder, rb *metadata.ResourceBuilder, now pcommon.Timestamp, instanceName string) []pmetric.Metrics {
var out []pmetric.Metrics
// m.m maps a resource spec (carrying cluster identity) to its metric infos;
// each iteration produces one emitted pmetric.Metrics for that resource.
for rs, metricInfos := range m.m {
for _, mi := range metricInfos {
mi.build(builder, rs, now) // removed: old parameter name
mi.build(mb, rs, now) // added: same call, renamed parameter
}
// removed: attributes were appended as options and passed to Emit.
rmo = append(rmo, metadata.WithSparkClusterID(rs.cluster.ClusterID))
rmo = append(rmo, metadata.WithSparkClusterName(rs.cluster.ClusterName))
out = append(out, builder.Emit(rmo...))
// added: attributes are now set on the ResourceBuilder, which emits a
// pcommon.Resource attached via metadata.WithResource.
rb.SetDatabricksInstanceName(instanceName)
rb.SetSparkClusterID(rs.cluster.ClusterID)
rb.SetSparkClusterName(rs.cluster.ClusterName)
out = append(out, mb.Emit(metadata.WithResource(rb.Emit())))
}
return out
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@ import (
"go.opentelemetry.io/collector/pdata/pmetric"

"github.com/signalfx/splunk-otel-collector/internal/receiver/databricksreceiver/internal/commontest"
"github.com/signalfx/splunk-otel-collector/internal/receiver/databricksreceiver/internal/metadata"
)

func TestSparkDbrMetrics_Append(t *testing.T) {
Expand All @@ -45,8 +46,9 @@ func TestSparkDbrMetrics_Append(t *testing.T) {
})
outerRM.Append(rmSub2)

builder := commontest.NewTestMetricsBuilder()
built := outerRM.Build(builder, pcommon.NewTimestampFromTime(time.Now()))
mb := commontest.NewTestMetricsBuilder()
rb := metadata.NewResourceBuilder(metadata.DefaultResourceAttributesConfig())
built := outerRM.Build(mb, rb, pcommon.NewTimestampFromTime(time.Now()), "my-app-id")
allMetrics := pmetric.NewMetrics()
for _, metrics := range built {
metrics.ResourceMetrics().CopyTo(allMetrics.ResourceMetrics())
Expand Down
6 changes: 4 additions & 2 deletions internal/receiver/databricksreceiver/scraper.go
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,7 @@ type scraper struct {
dbrsvc databricks.Service
logger *zap.Logger
metricsBuilder *metadata.MetricsBuilder
resourceBuilder *metadata.ResourceBuilder
dbrInstanceName string
}

Expand All @@ -60,7 +61,8 @@ func (s scraper) scrape(_ context.Context) (pmetric.Metrics, error) {
return pmetric.Metrics{}, fmt.Errorf("scrape failed to add multi job run metrics: %w", err)
}

dbrMetrics := s.metricsBuilder.Emit(metadata.WithDatabricksInstanceName(s.dbrInstanceName))
s.resourceBuilder.SetDatabricksInstanceName(s.dbrInstanceName)
dbrMetrics := s.metricsBuilder.Emit(metadata.WithResource(s.resourceBuilder.Emit()))

// spark metrics
clusters, err := s.dbrsvc.RunningClusters()
Expand Down Expand Up @@ -103,7 +105,7 @@ func (s scraper) scrape(_ context.Context) (pmetric.Metrics, error) {
out := pmetric.NewMetrics()
dbrMetrics.ResourceMetrics().MoveAndAppendTo(out.ResourceMetrics())

sparkMetrics := allSparkDbrMetrics.Build(s.metricsBuilder, now, metadata.WithDatabricksInstanceName(s.dbrInstanceName))
sparkMetrics := allSparkDbrMetrics.Build(s.metricsBuilder, s.resourceBuilder, now, s.dbrInstanceName)
for _, metric := range sparkMetrics {
metric.ResourceMetrics().MoveAndAppendTo(out.ResourceMetrics())
}
Expand Down
4 changes: 4 additions & 0 deletions internal/receiver/databricksreceiver/scraper_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@ import (

"github.com/signalfx/splunk-otel-collector/internal/receiver/databricksreceiver/internal/commontest"
"github.com/signalfx/splunk-otel-collector/internal/receiver/databricksreceiver/internal/databricks"
"github.com/signalfx/splunk-otel-collector/internal/receiver/databricksreceiver/internal/metadata"
"github.com/signalfx/splunk-otel-collector/internal/receiver/databricksreceiver/internal/spark"
)

Expand All @@ -39,6 +40,7 @@ func TestScraper_Success(t *testing.T) {
logger: nopLogger,
dbrInstanceName: "my-instance",
metricsBuilder: commontest.NewTestMetricsBuilder(),
resourceBuilder: metadata.NewResourceBuilder(metadata.DefaultResourceAttributesConfig()),
rmp: databricks.NewRunMetricsProvider(dbrsvc),
dbrmp: databricks.MetricsProvider{Svc: dbrsvc},
scmb: spark.ClusterMetricsBuilder{Ssvc: ssvc},
Expand Down Expand Up @@ -71,6 +73,7 @@ func TestScraper_Forbidden(t *testing.T) {
logger: nopLogger,
dbrInstanceName: "my-instance",
metricsBuilder: commontest.NewTestMetricsBuilder(),
resourceBuilder: metadata.NewResourceBuilder(metadata.DefaultResourceAttributesConfig()),
rmp: databricks.NewRunMetricsProvider(dbrsvc),
dbrmp: databricks.MetricsProvider{Svc: dbrsvc},
scmb: spark.ClusterMetricsBuilder{Ssvc: ssvc},
Expand All @@ -94,6 +97,7 @@ func TestScraper_MultiCluster_Forbidden(t *testing.T) {
logger: nopLogger,
dbrInstanceName: "my-instance",
metricsBuilder: commontest.NewTestMetricsBuilder(),
resourceBuilder: metadata.NewResourceBuilder(metadata.DefaultResourceAttributesConfig()),
rmp: databricks.NewRunMetricsProvider(dbrsvc),
dbrmp: databricks.MetricsProvider{Svc: dbrsvc},
scmb: spark.ClusterMetricsBuilder{Ssvc: ssvc},
Expand Down

0 comments on commit afd4aa8

Please sign in to comment.