
Commit 4be03c4

first round of cleanup for lintcheck
1 parent 4346580 commit 4be03c4

7 files changed: +34 -27 lines changed
Lines changed: 1 addition & 0 deletions
@@ -0,0 +1 @@
+# Package marker
Lines changed: 1 addition & 0 deletions
@@ -0,0 +1 @@
+# Package marker
Lines changed: 1 addition & 0 deletions
@@ -0,0 +1 @@
+# Package marker
Lines changed: 1 addition & 0 deletions
@@ -0,0 +1 @@
+# Package marker

util/opentelemetry-util-genai/src/opentelemetry/util/genai/data.py

Lines changed: 2 additions & 1 deletion
@@ -13,6 +13,7 @@
 # limitations under the License.

 from dataclasses import dataclass
+from typing import Type


 @dataclass
@@ -49,4 +50,4 @@ class ChatGeneration:
 @dataclass
 class Error:
     message: str
-    type: type[BaseException]
+    type: Type[BaseException]
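
The change above swaps the builtin generic annotation type[BaseException] for typing.Type[BaseException], presumably because subscripting the builtin type is only valid on Python 3.9+ and the lint check targets older versions as well. A minimal sketch of the resulting dataclass, with a hypothetical usage line that is not in the diff:

    # Minimal sketch; the field names come from the diff, the usage is illustrative.
    from dataclasses import dataclass
    from typing import Type


    @dataclass
    class Error:
        message: str
        type: Type[BaseException]


    # Hypothetical usage: record which exception class caused a failure.
    err = Error(message="model timed out", type=TimeoutError)
    assert issubclass(err.type, BaseException)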

util/opentelemetry-util-genai/src/opentelemetry/util/genai/emitters.py

Lines changed: 26 additions & 25 deletions
@@ -112,31 +112,32 @@ def _chat_generation_to_log_record(
     framework,
     capture_content: bool,
 ) -> Optional[LogRecord]:
-    if chat_generation:
-        attributes = {
-            # TODO: add below to opentelemetry.semconv._incubating.attributes.gen_ai_attributes
-            "gen_ai.framework": framework,
-            # TODO: Convert below to constant once opentelemetry.semconv._incubating.attributes.gen_ai_attributes is available
-            "gen_ai.provider.name": provider_name,
-        }
-
-        message = {
-            "type": chat_generation.type,
-        }
-        if capture_content and chat_generation.content:
-            message["content"] = chat_generation.content
-
-        body = {
-            "index": index,
-            "finish_reason": chat_generation.finish_reason or "error",
-            "message": message,
-        }
-
-        return LogRecord(
-            event_name="gen_ai.choice",
-            attributes=attributes,
-            body=body or None,
-        )
+    if not chat_generation:
+        return None
+    attributes = {
+        # TODO: add below to opentelemetry.semconv._incubating.attributes.gen_ai_attributes
+        "gen_ai.framework": framework,
+        # TODO: Convert below to constant once opentelemetry.semconv._incubating.attributes.gen_ai_attributes is available
+        "gen_ai.provider.name": provider_name,
+    }
+
+    message = {
+        "type": chat_generation.type,
+    }
+    if capture_content and chat_generation.content:
+        message["content"] = chat_generation.content
+
+    body = {
+        "index": index,
+        "finish_reason": chat_generation.finish_reason or "error",
+        "message": message,
+    }
+
+    return LogRecord(
+        event_name="gen_ai.choice",
+        attributes=attributes,
+        body=body or None,
+    )


 def _get_metric_attributes(
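
The rewrite above is a pure control-flow cleanup: an early return None guard replaces the enclosing if chat_generation: block, so the main path loses one level of indentation while the returned LogRecord is unchanged. The same guard-clause pattern in isolation, on a made-up function (nothing here comes from the repository except the control-flow shape):

    # Illustration of the guard-clause refactor; function and values are invented.
    from typing import Optional


    def describe(item: Optional[str]) -> Optional[str]:
        # Before: the whole body sat inside `if item:` and the function fell off
        # the end (implicitly returning None) when item was falsy.
        # After: bail out first, keep the main path at one indent level.
        if not item:
            return None
        return f"item={item!r}"


    assert describe(None) is None
    assert describe("x") == "item='x'"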

util/opentelemetry-util-genai/src/opentelemetry/util/genai/handler.py

Lines changed: 2 additions & 1 deletion
@@ -110,7 +110,8 @@ def __init__(self, emitter_type_full: bool = True, **kwargs: Any):
         self._llm_registry: dict[UUID, LLMInvocation] = {}
         self._lock = Lock()

-    def _should_collect_content(self) -> bool:
+    @staticmethod
+    def _should_collect_content() -> bool:
         return True  # Placeholder for future config

     def start_llm(
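
Since _should_collect_content never touches self, the cleanup converts it to a @staticmethod, which is presumably what the lint check flagged (pylint reports this as no-self-use); call sites through an instance keep working unchanged. A minimal sketch, with a hypothetical class name that is not confirmed by this diff:

    # Sketch of the staticmethod conversion; only the method body comes from the diff.
    class TelemetryHandler:  # hypothetical class name
        @staticmethod
        def _should_collect_content() -> bool:
            return True  # Placeholder for future config


    # Instance call sites are unaffected by the conversion.
    handler = TelemetryHandler()
    assert handler._should_collect_content() is True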
