32 commits
5333893
removing LogData and extending SDK LogRecord to have instrumentation …
hectorhdzg Jul 9, 2025
c3c3bac
Fix tests
hectorhdzg Jul 9, 2025
b7cda72
Keep LogData to avoid errors
hectorhdzg Jul 9, 2025
489fb85
Removing LogData
hectorhdzg Jul 11, 2025
9d61737
Merge branch 'main' into hectorhdzg/removelogdata
hectorhdzg Jul 14, 2025
748d50a
Merge branch 'main' into hectorhdzg/removelogdata
hectorhdzg Jul 17, 2025
59a4f7e
Update
hectorhdzg Jul 17, 2025
7591d05
Update test
hectorhdzg Jul 17, 2025
9ed04bb
Update
hectorhdzg Jul 18, 2025
13f927d
Update event test
hectorhdzg Jul 18, 2025
180c189
Update
hectorhdzg Jul 19, 2025
671eb69
Merge branch 'main' into hectorhdzg/removelogdata
hectorhdzg Jul 21, 2025
5f9e550
Address comments
hectorhdzg Jul 21, 2025
ba283ab
Update
hectorhdzg Jul 21, 2025
292c70f
Merge remote-tracking branch 'upstream/main' into hectorhdzg/removelo…
hectorhdzg Jul 29, 2025
c3f1607
Add ReadableLogRecord and ReadWriteLogRecord
hectorhdzg Sep 9, 2025
540bfe7
Merge branch 'main' into hectorhdzg/removelogdata
hectorhdzg Sep 9, 2025
d1cd9e5
Update
hectorhdzg Sep 9, 2025
9387ff1
Update tests
hectorhdzg Sep 9, 2025
940f642
Add dropped_attributes in ReadableLogRecord, this is used to encode l…
hectorhdzg Sep 9, 2025
dbef47b
Convert to ReadWriteLogRecord before exporting
hectorhdzg Sep 9, 2025
0e0b451
Update EventLogger
hectorhdzg Sep 9, 2025
d7a467a
Update
hectorhdzg Sep 9, 2025
5955465
Fix events tests
hectorhdzg Sep 9, 2025
9da84bc
Update
hectorhdzg Sep 9, 2025
34d2a1b
Update event test
hectorhdzg Sep 9, 2025
b232ac8
Merge branch 'main' into hectorhdzg/removelogdata
hectorhdzg Sep 10, 2025
d7f58f0
Update after merge
hectorhdzg Sep 10, 2025
4ca4db8
Update otlp common test
hectorhdzg Sep 10, 2025
1025fb1
Merge branch 'main' into hectorhdzg/removelogdata
hectorhdzg Sep 11, 2025
7eb9f58
Update
hectorhdzg Sep 11, 2025
53ecfa5
Address comments
hectorhdzg Sep 12, 2025
CHANGELOG.md (2 additions, 0 deletions)
@@ -83,6 +83,8 @@ can cause a deadlock to occur over `logging._lock` in some cases ([#4636](https:
([#4669](https://github.com/open-telemetry/opentelemetry-python/pull/4669))
- Set expected User-Agent in HTTP headers for grpc OTLP exporter
([#4658](https://github.com/open-telemetry/opentelemetry-python/pull/4658))
- Remove LogData and extend SDK LogRecord to have instrumentation scope
> Member Author (hectorhdzg): We should have a breaking changes section or something here; maybe I can update the title of the PR to reflect that.

([#4676](https://github.com/open-telemetry/opentelemetry-python/pull/4676))

## Version 1.34.0/0.55b0 (2025-06-04)

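For orientation before the code diffs: the changelog entry above is a breaking change, so here is a minimal migration sketch. The `ReadWriteLogRecord` constructor shape is taken from the updated tests later in this PR; the concrete values and the `"app"` / `"my_scope"` names are illustrative, not part of the change itself.

```python
# Hedged migration sketch; constructor shape copied from this PR's tests,
# values are illustrative only.
from opentelemetry._logs import LogRecord, SeverityNumber
from opentelemetry.sdk._logs import ReadWriteLogRecord
from opentelemetry.sdk.resources import Resource
from opentelemetry.sdk.util.instrumentation import InstrumentationScope

# Before this PR (removed):
# log = LogData(
#     log_record=SDKLogRecord(..., resource=Resource({"service.name": "app"})),
#     instrumentation_scope=InstrumentationScope("my_scope", "0.1.0"),
# )

# After this PR: the API-level LogRecord carries the event data, while the
# resource and instrumentation scope attach to the SDK-side ReadWriteLogRecord.
log = ReadWriteLogRecord(
    LogRecord(
        timestamp=1644650195189786880,
        severity_text="WARN",
        severity_number=SeverityNumber.WARN,
        body="example body",
        attributes={"a": 1, "b": "c"},
    ),
    resource=Resource({"service.name": "app"}),
    instrumentation_scope=InstrumentationScope("my_scope", "0.1.0"),
)
```

The diff that follows updates the OTLP log encoder (`encode_logs`, `_encode_log`, `_encode_resource_logs`) to consume the new SDK types instead of `LogData`.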
@@ -30,51 +30,55 @@
ResourceLogs,
ScopeLogs,
)
from opentelemetry.sdk._logs import LogData
from opentelemetry.sdk._logs import ReadableLogRecord


def encode_logs(batch: Sequence[LogData]) -> ExportLogsServiceRequest:
def encode_logs(
batch: Sequence[ReadableLogRecord],
) -> ExportLogsServiceRequest:
return ExportLogsServiceRequest(resource_logs=_encode_resource_logs(batch))


def _encode_log(log_data: LogData) -> PB2LogRecord:
def _encode_log(readable_log_record: ReadableLogRecord) -> PB2LogRecord:
span_id = (
None
if log_data.log_record.span_id == 0
else _encode_span_id(log_data.log_record.span_id)
if readable_log_record.log_record.span_id == 0
else _encode_span_id(readable_log_record.log_record.span_id)
)
trace_id = (
None
if log_data.log_record.trace_id == 0
else _encode_trace_id(log_data.log_record.trace_id)
if readable_log_record.log_record.trace_id == 0
else _encode_trace_id(readable_log_record.log_record.trace_id)
)
body = log_data.log_record.body
body = readable_log_record.log_record.body
return PB2LogRecord(
time_unix_nano=log_data.log_record.timestamp,
observed_time_unix_nano=log_data.log_record.observed_timestamp,
time_unix_nano=readable_log_record.log_record.timestamp,
observed_time_unix_nano=readable_log_record.log_record.observed_timestamp,
span_id=span_id,
trace_id=trace_id,
flags=int(log_data.log_record.trace_flags),
flags=int(readable_log_record.log_record.trace_flags),
body=_encode_value(body, allow_null=True),
severity_text=log_data.log_record.severity_text,
severity_text=readable_log_record.log_record.severity_text,
attributes=_encode_attributes(
log_data.log_record.attributes, allow_null=True
readable_log_record.log_record.attributes, allow_null=True
),
dropped_attributes_count=log_data.log_record.dropped_attributes,
dropped_attributes_count=readable_log_record.dropped_attributes,
severity_number=getattr(
log_data.log_record.severity_number, "value", None
readable_log_record.log_record.severity_number, "value", None
),
event_name=log_data.log_record.event_name,
event_name=readable_log_record.log_record.event_name,
)


def _encode_resource_logs(batch: Sequence[LogData]) -> List[ResourceLogs]:
def _encode_resource_logs(
batch: Sequence[ReadableLogRecord],
) -> List[ResourceLogs]:
sdk_resource_logs = defaultdict(lambda: defaultdict(list))

for sdk_log in batch:
sdk_resource = sdk_log.log_record.resource
sdk_instrumentation = sdk_log.instrumentation_scope or None
pb2_log = _encode_log(sdk_log)
for readable_log in batch:
sdk_resource = readable_log.resource
sdk_instrumentation = readable_log.instrumentation_scope or None
pb2_log = _encode_log(readable_log)

sdk_resource_logs[sdk_resource][sdk_instrumentation].append(pb2_log)

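Before the test changes, a quick usage sketch of the updated entry point: `encode_logs` now accepts a sequence of `ReadableLogRecord` objects and groups them by resource and then by instrumentation scope. The import path for `encode_logs` is assumed from the package layout, and the record values are illustrative.

```python
# Hedged usage sketch of the new encode_logs() signature.
from opentelemetry._logs import LogRecord, SeverityNumber
from opentelemetry.exporter.otlp.proto.common._log_encoder import (  # assumed path
    encode_logs,
)
from opentelemetry.sdk._logs import ReadWriteLogRecord
from opentelemetry.sdk.resources import Resource
from opentelemetry.sdk.util.instrumentation import InstrumentationScope

records = [
    ReadWriteLogRecord(
        LogRecord(
            severity_text="INFO",
            severity_number=SeverityNumber.INFO,
            body="hello from the new record shape",
            attributes={},
        ),
        resource=Resource({"demo_resource": "value"}),
        instrumentation_scope=InstrumentationScope("demo_scope", "0.1.0"),
    )
]

# Records sharing a resource land in one ResourceLogs entry; within it,
# records sharing an instrumentation scope share one ScopeLogs entry.
request = encode_logs(records)
print(len(request.resource_logs))  # 1
```

The test diff below updates the encoder tests to build `ReadWriteLogRecord` fixtures in exactly this shape.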
@@ -15,7 +15,7 @@
import unittest
from typing import List, Tuple

from opentelemetry._logs import SeverityNumber
from opentelemetry._logs import LogRecord, SeverityNumber
from opentelemetry.exporter.otlp.proto.common._internal import (
_encode_attributes,
_encode_span_id,
@@ -45,8 +45,7 @@
from opentelemetry.proto.resource.v1.resource_pb2 import (
Resource as PB2Resource,
)
from opentelemetry.sdk._logs import LogData, LogLimits
from opentelemetry.sdk._logs import LogRecord as SDKLogRecord
from opentelemetry.sdk._logs import LogLimits, ReadWriteLogRecord
from opentelemetry.sdk.resources import Resource as SDKResource
from opentelemetry.sdk.util.instrumentation import InstrumentationScope
from opentelemetry.trace import (
@@ -77,7 +76,7 @@ def test_encode_no_body(self):
def test_dropped_attributes_count(self):
sdk_logs = self._get_test_logs_dropped_attributes()
encoded_logs = encode_logs(sdk_logs)
self.assertTrue(hasattr(sdk_logs[0].log_record, "dropped_attributes"))
self.assertTrue(hasattr(sdk_logs[0], "dropped_attributes"))
self.assertEqual(
# pylint:disable=no-member
encoded_logs.resource_logs[0]
@@ -88,7 +87,7 @@ def test_dropped_attributes_count(self):
)

@staticmethod
def _get_sdk_log_data() -> List[LogData]:
def _get_sdk_log_data() -> List[ReadWriteLogRecord]:
# pylint:disable=too-many-locals
ctx_log1 = set_span_in_context(
NonRecordingSpan(
@@ -100,35 +99,35 @@ def _get_sdk_log_data() -> List[LogData]:
)
)
)
log1 = LogData(
log_record=SDKLogRecord(
log1 = ReadWriteLogRecord(
LogRecord(
timestamp=1644650195189786880,
observed_timestamp=1644650195189786881,
context=ctx_log1,
severity_text="WARN",
severity_number=SeverityNumber.WARN,
body="Do not go gentle into that good night. Rage, rage against the dying of the light",
resource=SDKResource(
{"first_resource": "value"},
"resource_schema_url",
),
attributes={"a": 1, "b": "c"},
),
resource=SDKResource(
{"first_resource": "value"},
"resource_schema_url",
),
instrumentation_scope=InstrumentationScope(
"first_name", "first_version"
),
)

log2 = LogData(
log_record=SDKLogRecord(
log2 = ReadWriteLogRecord(
LogRecord(
timestamp=1644650249738562048,
observed_timestamp=1644650249738562049,
severity_text="WARN",
severity_number=SeverityNumber.WARN,
body="Cooper, this is no time for caution!",
resource=SDKResource({"second_resource": "CASE"}),
attributes={},
),
resource=SDKResource({"second_resource": "CASE"}),
instrumentation_scope=InstrumentationScope(
"second_name", "second_version"
),
@@ -144,17 +143,17 @@ def _get_sdk_log_data() -> List[LogData]:
)
)
)
log3 = LogData(
log_record=SDKLogRecord(
log3 = ReadWriteLogRecord(
LogRecord(
timestamp=1644650427658989056,
observed_timestamp=1644650427658989057,
context=ctx_log3,
severity_text="DEBUG",
severity_number=SeverityNumber.DEBUG,
body="To our galaxy",
resource=SDKResource({"second_resource": "CASE"}),
attributes={"a": 1, "b": "c"},
),
resource=SDKResource({"second_resource": "CASE"}),
instrumentation_scope=None,
)

@@ -168,20 +167,20 @@ def _get_sdk_log_data() -> List[LogData]:
)
)
)
log4 = LogData(
log_record=SDKLogRecord(
log4 = ReadWriteLogRecord(
LogRecord(
timestamp=1644650584292683008,
observed_timestamp=1644650584292683009,
context=ctx_log4,
severity_text="INFO",
severity_number=SeverityNumber.INFO,
body="Love is the one thing that transcends time and space",
resource=SDKResource(
{"first_resource": "value"},
"resource_schema_url",
),
attributes={"filename": "model.py", "func_name": "run_method"},
),
resource=SDKResource(
{"first_resource": "value"},
"resource_schema_url",
),
instrumentation_scope=InstrumentationScope(
"another_name", "another_version"
),
@@ -197,17 +196,17 @@ def _get_sdk_log_data() -> List[LogData]:
)
)
)
log5 = LogData(
log_record=SDKLogRecord(
log5 = ReadWriteLogRecord(
LogRecord(
timestamp=1644650584292683009,
observed_timestamp=1644650584292683010,
context=ctx_log5,
severity_text="INFO",
severity_number=SeverityNumber.INFO,
body={"error": None, "array_with_nones": [1, None, 2]},
resource=SDKResource({}),
attributes={},
),
resource=SDKResource({}),
instrumentation_scope=InstrumentationScope(
"last_name", "last_version"
),
@@ -223,20 +222,20 @@ def _get_sdk_log_data() -> List[LogData]:
)
)
)
log6 = LogData(
log_record=SDKLogRecord(
log6 = ReadWriteLogRecord(
LogRecord(
timestamp=1644650584292683022,
observed_timestamp=1644650584292683022,
context=ctx_log6,
severity_text="ERROR",
severity_number=SeverityNumber.ERROR,
body="This instrumentation scope has a schema url",
resource=SDKResource(
{"first_resource": "value"},
"resource_schema_url",
),
attributes={"filename": "model.py", "func_name": "run_method"},
),
resource=SDKResource(
{"first_resource": "value"},
"resource_schema_url",
),
instrumentation_scope=InstrumentationScope(
"scope_with_url",
"scope_with_url_version",
@@ -254,20 +253,20 @@ def _get_sdk_log_data() -> List[LogData]:
)
)
)
log7 = LogData(
log_record=SDKLogRecord(
log7 = ReadWriteLogRecord(
LogRecord(
timestamp=1644650584292683033,
observed_timestamp=1644650584292683033,
context=ctx_log7,
severity_text="FATAL",
severity_number=SeverityNumber.FATAL,
body="This instrumentation scope has a schema url and attributes",
resource=SDKResource(
{"first_resource": "value"},
"resource_schema_url",
),
attributes={"filename": "model.py", "func_name": "run_method"},
),
resource=SDKResource(
{"first_resource": "value"},
"resource_schema_url",
),
instrumentation_scope=InstrumentationScope(
"scope_with_attributes",
"scope_with_attributes_version",
@@ -286,21 +285,21 @@ def _get_sdk_log_data() -> List[LogData]:
)
)
)
log8 = LogData(
log_record=SDKLogRecord(
log8 = ReadWriteLogRecord(
LogRecord(
timestamp=1644650584292683044,
observed_timestamp=1644650584292683044,
context=ctx_log8,
severity_text="INFO",
severity_number=SeverityNumber.INFO,
body="Test export of extended attributes",
resource=SDKResource({}),
attributes={
"extended": {
"sequence": [{"inner": "mapping", "none": None}]
}
},
),
resource=SDKResource({}),
instrumentation_scope=InstrumentationScope(
"extended_name", "extended_version"
),
@@ -316,13 +315,13 @@ def _get_sdk_log_data() -> List[LogData]:
)
)
)
log9 = LogData(
log_record=SDKLogRecord(
log9 = ReadWriteLogRecord(
LogRecord(
# these are otherwise set by default
observed_timestamp=1644650584292683045,
context=ctx_log9,
resource=SDKResource({}),
),
resource=SDKResource({}),
instrumentation_scope=InstrumentationScope(
"empty_log_record_name", "empty_log_record_version"
),
@@ -331,7 +330,7 @@ def _get_sdk_log_data() -> List[LogData]:

def get_test_logs(
self,
) -> Tuple[List[SDKLogRecord], ExportLogsServiceRequest]:
) -> Tuple[List[ReadWriteLogRecord], ExportLogsServiceRequest]:
sdk_logs = self._get_sdk_log_data()

pb2_service_request = ExportLogsServiceRequest(
@@ -647,7 +646,7 @@ def get_test_logs(
return sdk_logs, pb2_service_request

@staticmethod
def _get_test_logs_dropped_attributes() -> List[LogData]:
def _get_test_logs_dropped_attributes() -> List[ReadWriteLogRecord]:
ctx_log1 = set_span_in_context(
NonRecordingSpan(
SpanContext(
@@ -658,34 +657,34 @@ def _get_test_logs_dropped_attributes() -> List[LogData]:
)
)
)
log1 = LogData(
log_record=SDKLogRecord(
log1 = ReadWriteLogRecord(
LogRecord(
timestamp=1644650195189786880,
context=ctx_log1,
severity_text="WARN",
severity_number=SeverityNumber.WARN,
body="Do not go gentle into that good night. Rage, rage against the dying of the light",
resource=SDKResource({"first_resource": "value"}),
attributes={"a": 1, "b": "c", "user_id": "B121092"},
limits=LogLimits(max_attributes=1),
),
resource=SDKResource({"first_resource": "value"}),
limits=LogLimits(max_attributes=1),
instrumentation_scope=InstrumentationScope(
"first_name", "first_version"
),
)
ctx_log2 = set_span_in_context(
NonRecordingSpan(SpanContext(0, 0, False))
)
log2 = LogData(
log_record=SDKLogRecord(
log2 = ReadWriteLogRecord(
LogRecord(
timestamp=1644650249738562048,
context=ctx_log2,
severity_text="WARN",
severity_number=SeverityNumber.WARN,
body="Cooper, this is no time for caution!",
resource=SDKResource({"second_resource": "CASE"}),
attributes={},
),
resource=SDKResource({"second_resource": "CASE"}),
instrumentation_scope=InstrumentationScope(
"second_name", "second_version"
),
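One last note on the dropped-attributes test above: the `limits=LogLimits(max_attributes=1)` argument moved from the old `SDKLogRecord` onto `ReadWriteLogRecord`, and the encoder now reads `dropped_attributes` from the SDK record (see `dropped_attributes_count=readable_log_record.dropped_attributes` earlier). A hedged sketch of the same setup outside the test harness; the attribute values are copied from the fixture, the scope name is illustrative, and the exact point where the limit is enforced is assumed to match the test's expectations.

```python
# Hedged sketch: attribute limits attached to the SDK-side record, mirroring
# _get_test_logs_dropped_attributes() above.
from opentelemetry._logs import LogRecord, SeverityNumber
from opentelemetry.sdk._logs import LogLimits, ReadWriteLogRecord
from opentelemetry.sdk.resources import Resource
from opentelemetry.sdk.util.instrumentation import InstrumentationScope

record = ReadWriteLogRecord(
    LogRecord(
        severity_text="WARN",
        severity_number=SeverityNumber.WARN,
        body="record with more attributes than the limit allows",
        attributes={"a": 1, "b": "c", "user_id": "B121092"},
    ),
    resource=Resource({"first_resource": "value"}),
    limits=LogLimits(max_attributes=1),  # keep at most one attribute
    instrumentation_scope=InstrumentationScope("limits_demo", "0.1.0"),
)

# Number of attributes removed by the max_attributes limit; the encoder maps
# this to the protobuf field dropped_attributes_count.
print(record.dropped_attributes)
```

Keeping the dropped count on the SDK record lets the encoder fill `dropped_attributes_count` without reaching into the API-level `LogRecord`.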