receiver/*: remove use of pdata slice Resize() #4208

Merged
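Across these receivers the change is the same: a pdata slice is no longer pre-grown with Resize(n) and filled by index with At(i); instead EnsureCapacity(n) reserves backing storage and AppendEmpty() adds one element per item that is actually kept. A minimal, self-contained sketch of the two semantics, using a toy slice type rather than the real pdata API:

```go
package main

import "fmt"

// Item stands in for a pdata element (Span, Metric, LogRecord, ...).
// ItemSlice is a toy model of the slice API touched by this PR, not the
// real pdata package.
type Item struct{ Name string }

type ItemSlice struct{ items []*Item }

// EnsureCapacity only grows the backing array; Len() is unchanged.
func (s *ItemSlice) EnsureCapacity(n int) {
	if cap(s.items) < n {
		grown := make([]*Item, len(s.items), n)
		copy(grown, s.items)
		s.items = grown
	}
}

// AppendEmpty adds one zero-value element and returns it to be filled in.
func (s *ItemSlice) AppendEmpty() *Item {
	it := &Item{}
	s.items = append(s.items, it)
	return it
}

func (s *ItemSlice) Len() int { return len(s.items) }

func main() {
	names := []string{"a", "b", "c"}

	// Old style: slice.Resize(len(names)) created len(names) elements up
	// front, which were then filled by index via At(i).
	// New style: reserve capacity, then append exactly one element per input.
	var slice ItemSlice
	slice.EnsureCapacity(len(names))
	for _, name := range names {
		slice.AppendEmpty().Name = name
	}
	fmt.Println(slice.Len()) // 3
}
```

The difference matters most when inputs can be dropped: with Resize the slice had to be shrunk again afterwards, while with AppendEmpty it never grows past what was kept, as several of the hunks below show.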
6 changes: 3 additions & 3 deletions receiver/awsxrayreceiver/internal/translator/cause.go
@@ -56,10 +56,10 @@ func addCause(seg *awsxray.Segment, span *pdata.Span) {
 		// not sure whether there are existing events, so
 		// append new empty events instead
 		exceptionEventStartIndex := evts.Len()
-		evts.Resize(exceptionEventStartIndex + len(seg.Cause.Exceptions))
+		evts.EnsureCapacity(exceptionEventStartIndex + len(seg.Cause.Exceptions))

-		for i, excp := range seg.Cause.Exceptions {
-			evt := evts.At(exceptionEventStartIndex + i)
+		for _, excp := range seg.Cause.Exceptions {
+			evt := evts.AppendEmpty()
 			evt.SetName(conventions.AttributeExceptionEventName)
 			attrs := evt.Attributes()
 			attrs.Clear()
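Note that AppendEmpty always appends after whatever the events slice already holds, so the exceptionEventStartIndex + i arithmetic is no longer needed; only the capacity hint still uses the starting length. A rough plain-slice analogy (not the pdata API):

```go
// With append-style construction it does not matter whether events already
// has entries; each exception simply becomes the next element.
type exceptionEvent struct{ name string }

func addExceptionEvents(events []exceptionEvent, exceptions []string) []exceptionEvent {
	for _, excp := range exceptions {
		events = append(events, exceptionEvent{name: excp})
	}
	return events
}
```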
19 changes: 9 additions & 10 deletions receiver/awsxrayreceiver/internal/translator/translator.go
@@ -61,7 +61,7 @@ func ToTraces(rawSeg []byte) (*pdata.Traces, int, error) {
 	// library so only allocate one `InstrumentationLibrarySpans` in the
 	// `InstrumentationLibrarySpansSlice`.
 	ils := rspan.InstrumentationLibrarySpans().AppendEmpty()
-	ils.Spans().Resize(count)
+	ils.Spans().EnsureCapacity(count)
 	spans := ils.Spans()

 	// populating global attributes shared among segment and embedded subsegment(s)
@@ -72,7 +72,7 @@ func ToTraces(rawSeg []byte) (*pdata.Traces, int, error) {
 		// TraceID of the root segment in because embedded subsegments
 		// do not have that information, but it's needed after we flatten
 		// the embedded subsegment to generate independent child spans.
-		_, _, err = segToSpans(seg, seg.TraceID, nil, &spans, 0)
+		_, err = segToSpans(seg, seg.TraceID, nil, &spans)
 		if err != nil {
 			return nil, count, err
 		}
@@ -82,23 +82,22 @@ func ToTraces(rawSeg []byte) (*pdata.Traces, int, error) {

 func segToSpans(seg awsxray.Segment,
 	traceID, parentID *string,
-	spans *pdata.SpanSlice, startingIndex int) (int, *pdata.Span, error) {
+	spans *pdata.SpanSlice) (*pdata.Span, error) {

-	span := spans.At(startingIndex)
+	span := spans.AppendEmpty()

 	err := populateSpan(&seg, traceID, parentID, &span)
 	if err != nil {
-		return 0, nil, err
+		return nil, err
 	}

-	startingIndexForSubsegment := 1 + startingIndex
 	var populatedChildSpan *pdata.Span
 	for _, s := range seg.Subsegments {
-		startingIndexForSubsegment, populatedChildSpan, err = segToSpans(s,
+		populatedChildSpan, err = segToSpans(s,
 			traceID, seg.ID,
-			spans, startingIndexForSubsegment)
+			spans)
 		if err != nil {
-			return 0, nil, err
+			return nil, err
 		}

 		if seg.Cause != nil &&
@@ -116,7 +115,7 @@ func segToSpans(seg awsxray.Segment,
 		}
 	}

-	return startingIndexForSubsegment, &span, nil
+	return &span, nil
 }

 func populateSpan(
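The interesting part of this file is the signature change: segToSpans no longer threads startingIndex through the recursion (and no longer returns the next free index), because each call appends its own span and the subsegment calls simply append after it. A self-contained sketch of that control flow with toy types, not the actual translator:

```go
package main

import "fmt"

// Toy stand-ins for the X-Ray segment tree and the produced spans; this only
// illustrates the control flow of the new segToSpans, not the real code.
type Segment struct {
	Name        string
	Subsegments []Segment
}

type Span struct{ Name string }

// segToSpans appends one span per segment, depth first. With append-style
// construction no starting index has to be passed around: the next element
// always goes at the end of the slice.
func segToSpans(seg Segment, spans *[]*Span) *Span {
	span := &Span{Name: seg.Name}
	*spans = append(*spans, span)
	for _, sub := range seg.Subsegments {
		segToSpans(sub, spans)
	}
	return span
}

func main() {
	root := Segment{Name: "root", Subsegments: []Segment{{Name: "subsegment"}}}
	var spans []*Span
	segToSpans(root, &spans)
	fmt.Println(len(spans)) // 2: the root plus its subsegment
}
```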
@@ -1002,10 +1002,10 @@ func initResourceSpans(expectedSeg *awsxray.Segment,
 	}

 	ls := rs.InstrumentationLibrarySpans().AppendEmpty()
-	ls.Spans().Resize(len(propsPerSpan))
+	ls.Spans().EnsureCapacity(len(propsPerSpan))

-	for i, props := range propsPerSpan {
-		sp := ls.Spans().At(i)
+	for _, props := range propsPerSpan {
+		sp := ls.Spans().AppendEmpty()
 		spanIDBytes, _ := decodeXRaySpanID(&props.spanID)
 		sp.SetSpanID(pdata.NewSpanID(spanIDBytes))
 		if props.parentSpanID != nil {
@@ -1024,9 +1024,9 @@
 		sp.Status().SetCode(props.spanStatus.code)

 		if len(props.eventsProps) > 0 {
-			sp.Events().Resize(len(props.eventsProps))
-			for i, evtProps := range props.eventsProps {
-				spEvt := sp.Events().At(i)
+			sp.Events().EnsureCapacity(len(props.eventsProps))
+			for _, evtProps := range props.eventsProps {
+				spEvt := sp.Events().AppendEmpty()
 				spEvt.SetName(evtProps.name)
 				spEvt.Attributes().InitFromMap(evtProps.attrs)
 			}
4 changes: 2 additions & 2 deletions receiver/dotnetdiagnosticsreceiver/metrics/converter.go
@@ -30,9 +30,9 @@ func rawMetricsToPdata(rawMetrics []dotnet.Metric, startTime, now time.Time) pda
 	ilms := rm.InstrumentationLibraryMetrics()
 	ilm := ilms.AppendEmpty()
 	ms := ilm.Metrics()
-	ms.Resize(len(rawMetrics))
+	ms.EnsureCapacity(len(rawMetrics))
 	for i := 0; i < len(rawMetrics); i++ {
-		rawMetricToPdata(rawMetrics[i], ms.At(i), startTime, now)
+		rawMetricToPdata(rawMetrics[i], ms.AppendEmpty(), startTime, now)
 	}
 	return pdm
 }
4 changes: 2 additions & 2 deletions receiver/fluentforwardreceiver/conversion.go
@@ -287,9 +287,9 @@ func (fe *ForwardEventLogRecords) DecodeMsg(dc *msgp.Reader) (err error) {
 		return
 	}

-	fe.LogSlice.Resize(int(entryLen))
+	fe.LogSlice.EnsureCapacity(int(entryLen))
 	for i := 0; i < int(entryLen); i++ {
-		lr := fe.LogSlice.At(i)
+		lr := fe.LogSlice.AppendEmpty()

 		err = parseEntryToLogRecord(dc, lr)
 		if err != nil {
4 changes: 2 additions & 2 deletions receiver/fluentforwardreceiver/util_test.go
@@ -33,9 +33,9 @@ type Log struct {
 func Logs(recs ...Log) pdata.Logs {
 	out := pdata.NewLogs()
 	logSlice := out.ResourceLogs().AppendEmpty().InstrumentationLibraryLogs().AppendEmpty().Logs()
-	logSlice.Resize(len(recs))
+	logSlice.EnsureCapacity(len(recs))
 	for i := range recs {
-		l := logSlice.At(i)
+		l := logSlice.AppendEmpty()
 		recs[i].Body.CopyTo(l.Body())
 		l.SetTimestamp(pdata.Timestamp(recs[i].Timestamp))
 		l.Attributes().InitFromMap(recs[i].Attributes)
8 changes: 4 additions & 4 deletions receiver/redisreceiver/pdata.go
@@ -20,10 +20,10 @@ import (

 func buildKeyspaceTriplet(k *keyspace, t *timeBundle) pdata.MetricSlice {
 	ms := pdata.NewMetricSlice()
-	ms.Resize(3)
-	initKeyspaceKeysMetric(k, t, ms.At(0))
-	initKeyspaceExpiresMetric(k, t, ms.At(1))
-	initKeyspaceTTLMetric(k, t, ms.At(2))
+	ms.EnsureCapacity(3)
+	initKeyspaceKeysMetric(k, t, ms.AppendEmpty())
+	initKeyspaceExpiresMetric(k, t, ms.AppendEmpty())
+	initKeyspaceTTLMetric(k, t, ms.AppendEmpty())
 	return ms
 }

6 changes: 3 additions & 3 deletions receiver/signalfxreceiver/signalfxv2_event_to_logdata.go
@@ -25,10 +25,10 @@ import (
 // pdata.LogSlice. Returning the converted data and the number of dropped log
 // records.
 func signalFxV2EventsToLogRecords(events []*sfxpb.Event, lrs pdata.LogSlice) {
-	lrs.Resize(len(events))
+	lrs.EnsureCapacity(len(events))

-	for i, event := range events {
-		lr := lrs.At(i)
+	for _, event := range events {
+		lr := lrs.AppendEmpty()

 		// The EventType field is the most logical "name" of the event.
 		lr.SetName(event.EventType)
12 changes: 6 additions & 6 deletions receiver/signalfxreceiver/signalfxv2_to_metricdata.go
@@ -44,16 +44,17 @@ func signalFxV2ToMetrics(
 	ilm := rm.InstrumentationLibraryMetrics().AppendEmpty()

 	metrics := ilm.Metrics()
-	metrics.Resize(len(sfxDataPoints))
+	metrics.EnsureCapacity(len(sfxDataPoints))

-	i := 0
 	for _, sfxDataPoint := range sfxDataPoints {
 		if sfxDataPoint == nil {
 			// TODO: Log or metric for this odd ball?
 			continue
 		}

-		m := metrics.At(i)
+		// fill in a new, unassociated metric as we may drop it during the process
+		m := pdata.NewMetric()
+
 		// First check if the type is convertible and the data point is consistent.
 		err := fillInType(sfxDataPoint, m)
 		if err != nil {
@@ -85,11 +86,10 @@ func signalFxV2ToMetrics(
 			continue
 		}

-		i++
+		// We know at this point we're keeping this metric
+		m.CopyTo(metrics.AppendEmpty())
 	}

-	metrics.Resize(i)
-
 	return md, numDroppedDataPoints
 }
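Because a slot is no longer reserved per input up front, invalid SignalFx data points never make it into the slice, and the trailing metrics.Resize(i) truncation disappears: each metric is assembled as a free-standing value first and copied in only once it is known to be kept. A self-contained toy sketch of that build-then-copy pattern (CopyTo here imitates, but is not, the pdata method):

```go
package main

import "fmt"

// Toy model of the "build detached, copy in only if kept" pattern.
type Metric struct{ Name string }

// CopyTo imitates pdata's CopyTo: it copies this metric's content into dest.
func (m *Metric) CopyTo(dest *Metric) { *dest = *m }

type MetricSlice struct{ metrics []*Metric }

func (s *MetricSlice) AppendEmpty() *Metric {
	m := &Metric{}
	s.metrics = append(s.metrics, m)
	return m
}

func (s *MetricSlice) Len() int { return len(s.metrics) }

func main() {
	inputs := []string{"cpu.utilization", "", "memory.used"} // "" stands for an unconvertible data point
	var metrics MetricSlice

	for _, in := range inputs {
		// Fill in a new, unassociated metric first, since it may be dropped.
		m := &Metric{Name: in}
		if m.Name == "" {
			continue // dropped: nothing was ever added to the slice
		}
		// At this point the metric is known to be kept.
		m.CopyTo(metrics.AppendEmpty())
	}
	fmt.Println(metrics.Len()) // 2 — no trailing truncation needed to trim drops
}
```

The price is an extra copy per kept metric; the upside is that the slice never holds half-filled elements that have to be trimmed afterwards.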
8 changes: 4 additions & 4 deletions receiver/splunkhecreceiver/splunk_to_logdata_test.go
@@ -114,10 +114,10 @@ func Test_SplunkHecToLogData(t *testing.T) {
 	logsSlice := createLogsSlice(nanoseconds)
 	foosArr := pdata.NewAttributeValueArray()
 	foos := foosArr.ArrayVal()
-	foos.Resize(3)
-	foos.At(0).SetStringVal("foo")
-	foos.At(1).SetStringVal("bar")
-	foos.At(2).SetStringVal("foobar")
+	foos.EnsureCapacity(3)
+	foos.AppendEmpty().SetStringVal("foo")
+	foos.AppendEmpty().SetStringVal("bar")
+	foos.AppendEmpty().SetStringVal("foobar")

 	attVal := pdata.NewAttributeValueMap()
 	attMap := attVal.MapVal()
18 changes: 9 additions & 9 deletions receiver/statsdreceiver/protocol/metric_translator_test.go
@@ -88,20 +88,20 @@ func TestBuildSummaryMetric(t *testing.T) {

 	metric := buildSummaryMetric(oneSummaryMetric)
 	expectedMetric := pdata.NewInstrumentationLibraryMetrics()
-	expectedMetric.Metrics().Resize(1)
-	expectedMetric.Metrics().At(0).SetName("testSummary")
-	expectedMetric.Metrics().At(0).SetDataType(pdata.MetricDataTypeSummary)
-	expectedMetric.Metrics().At(0).Summary().DataPoints().Resize(1)
-	expectedMetric.Metrics().At(0).Summary().DataPoints().At(0).SetSum(21)
-	expectedMetric.Metrics().At(0).Summary().DataPoints().At(0).SetCount(6)
-	expectedMetric.Metrics().At(0).Summary().DataPoints().At(0).SetTimestamp(pdata.TimestampFromTime(timeNow))
+	m := expectedMetric.Metrics().AppendEmpty()
+	m.SetName("testSummary")
+	m.SetDataType(pdata.MetricDataTypeSummary)
+	dp := m.Summary().DataPoints().AppendEmpty()
+	dp.SetSum(21)
+	dp.SetCount(6)
+	dp.SetTimestamp(pdata.TimestampFromTime(timeNow))
 	for i, key := range oneSummaryMetric.labelKeys {
-		expectedMetric.Metrics().At(0).Summary().DataPoints().At(0).LabelsMap().Insert(key, oneSummaryMetric.labelValues[i])
+		dp.LabelsMap().Insert(key, oneSummaryMetric.labelValues[i])
 	}
 	quantile := []float64{0, 10, 50, 90, 95, 100}
 	value := []float64{1, 1, 3, 6, 6, 6}
 	for int, v := range quantile {
-		eachQuantile := expectedMetric.Metrics().At(0).Summary().DataPoints().At(0).QuantileValues().AppendEmpty()
+		eachQuantile := dp.QuantileValues().AppendEmpty()
 		eachQuantile.SetQuantile(v)
 		eachQuantileValue := value[int]
 		eachQuantile.SetValue(eachQuantileValue)
@@ -110,19 +110,16 @@ func (s *scraper) scrape(context.Context) (pdata.MetricSlice, error) {

 	var errors []error

-	metrics.Resize(len(s.counters))
-	idx := 0
+	metrics.EnsureCapacity(len(s.counters))
 	for _, counter := range s.counters {
 		counterValues, err := counter.ScrapeData()
 		if err != nil {
 			errors = append(errors, err)
 			continue
 		}

-		initializeDoubleGaugeMetric(metrics.At(idx), now, counter.Path(), counterValues)
-		idx++
+		initializeDoubleGaugeMetric(metrics.AppendEmpty(), now, counter.Path(), counterValues)
 	}
-	metrics.Resize(len(s.counters) - len(errors))

 	return metrics, consumererror.Combine(errors)
 }
@@ -133,9 +130,9 @@ func initializeDoubleGaugeMetric(metric pdata.Metric, now pdata.Timestamp, name

 	dg := metric.Gauge()
 	ddps := dg.DataPoints()
-	ddps.Resize(len(counterValues))
-	for i, counterValue := range counterValues {
-		initializeDoubleDataPoint(ddps.At(i), now, counterValue.InstanceName, counterValue.Value)
+	ddps.EnsureCapacity(len(counterValues))
+	for _, counterValue := range counterValues {
+		initializeDoubleDataPoint(ddps.AppendEmpty(), now, counterValue.InstanceName, counterValue.Value)
 	}
 }
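In this scraper the filtering is simpler still: a metric is appended only after the counter scrapes successfully, so both the idx bookkeeping and the final metrics.Resize(len(s.counters) - len(errors)) truncation go away. The loop shape, as a plain-Go analogy rather than the real scraper:

```go
// results only ever holds successfully scraped values, so nothing has to be
// truncated afterwards; errs collects the failures separately.
func collect(counters []func() (float64, error)) (results []float64, errs []error) {
	for _, scrape := range counters {
		v, err := scrape()
		if err != nil {
			errs = append(errs, err)
			continue
		}
		results = append(results, v)
	}
	return results, errs
}
```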
10 changes: 6 additions & 4 deletions receiver/zookeeperreceiver/scraper.go
@@ -115,17 +115,15 @@ func (z *zookeeperMetricsScraper) getResourceMetrics(conn net.Conn) (pdata.Resou

 	md := pdata.NewMetrics()
 	z.appendMetrics(scanner, md.ResourceMetrics())
-	if md.DataPointCount() == 0 {
-		md.ResourceMetrics().Resize(0)
-	}
 	return md.ResourceMetrics(), nil
 }

 func (z *zookeeperMetricsScraper) appendMetrics(scanner *bufio.Scanner, rms pdata.ResourceMetricsSlice) {
 	now := pdata.TimestampFromTime(time.Now())
-	rm := rms.AppendEmpty()
+	rm := pdata.NewResourceMetrics()
 	ilm := rm.InstrumentationLibraryMetrics().AppendEmpty()
 	ilm.InstrumentationLibrary().SetName("otelcol/zookeeper")
+	keepRM := false
 	for scanner.Scan() {
 		line := scanner.Text()
 		parts := zookeeperFormatRE.FindStringSubmatch(line)
@@ -173,8 +171,12 @@ func (z *zookeeperMetricsScraper) appendMetrics(scanner *bufio.Scanner, rms pdat
 				dp.SetTimestamp(now)
 				dp.SetValue(int64Val)
 			}
+			keepRM = true
 		}
 	}
+	if keepRM {
+		rm.CopyTo(rms.AppendEmpty())
+	}
 }

 func closeConnection(conn net.Conn) error {
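The ZooKeeper scraper applies the detached-then-CopyTo idea one level up: the ResourceMetrics is built locally and appended only when at least one line produced a data point (keepRM), which is what lets getResourceMetrics drop its DataPointCount() == 0 / Resize(0) cleanup. A toy sketch of that guard (types and CopyTo mimic, but are not, the pdata API):

```go
package main

import "fmt"

// Toy stand-ins for pdata.ResourceMetrics and pdata.ResourceMetricsSlice.
type ResourceMetrics struct{ dataPoints int }

func (rm *ResourceMetrics) CopyTo(dest *ResourceMetrics) { *dest = *rm }

type ResourceMetricsSlice struct{ items []*ResourceMetrics }

func (s *ResourceMetricsSlice) AppendEmpty() *ResourceMetrics {
	rm := &ResourceMetrics{}
	s.items = append(s.items, rm)
	return rm
}

func (s *ResourceMetricsSlice) Len() int { return len(s.items) }

// appendMetrics fills a detached ResourceMetrics and appends it to rms only
// when at least one line could be turned into a data point.
func appendMetrics(lines []string, rms *ResourceMetricsSlice) {
	rm := &ResourceMetrics{}
	keepRM := false
	for _, line := range lines {
		if line == "" { // stand-in for a line that cannot be parsed
			continue
		}
		rm.dataPoints++
		keepRM = true
	}
	if keepRM {
		rm.CopyTo(rms.AppendEmpty())
	}
}

func main() {
	var rms ResourceMetricsSlice
	appendMetrics(nil, &rms)                          // nothing scraped
	appendMetrics([]string{"zk_znode_count 5"}, &rms) // one parsable line
	fmt.Println(rms.Len()) // 1 — the empty scrape never produced a resource entry
}
```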