Skip to content

Commit 5d77678

Browse files
committed
small fixes
1 parent 2f148bb commit 5d77678

File tree

2 files changed

+10
-10
lines changed

2 files changed

+10
-10
lines changed

notebooks/Oumi - Using vLLM Engine for Inference.ipynb

+3-3
Original file line number | Diff line number | Diff line change
@@ -88,9 +88,9 @@
8888
"%%writefile llama70b_inference_config.yaml\n",
8989
"\n",
9090
"model:\n",
91-
"# model_name: \"meta-llama/Meta-Llama-3.1-8B-Instruct\" # 8B model, requires 1x A100-40GB GPUs\n",
92-
" model_name: \"meta-llama/Meta-Llama-3.1-70B-Instruct\" # 70B model, requires 4x A100-40GB GPUs\n",
93-
"# model_name: \"bartowski/Meta-Llama-3.1-70B-Instruct-GGUF\" # 4-bit quantized model, requires 1x A100-40GB GPUs. See bonus section for more details.\n",
91+
" model_name: \"meta-llama/Meta-Llama-3.1-8B-Instruct\" # 8B model, requires 1x A100-40GB GPUs\n",
92+
" # model_name: \"meta-llama/Meta-Llama-3.1-70B-Instruct\" # 70B model, requires 4x A100-40GB GPUs\n",
93+
" # model_name: \"bartowski/Meta-Llama-3.1-70B-Instruct-GGUF\" # 4-bit quantized model, requires 1x A100-40GB GPUs. See bonus section for more details.\n",
9494
" model_max_length: 512\n",
9595
" torch_dtype_str: \"bfloat16\"\n",
9696
" trust_remote_code: True\n",

src/oumi/core/types/turn.py

+7-7
Original file line number | Diff line number | Diff line change
@@ -107,8 +107,8 @@ def is_text(self) -> bool:
107107
return self.type == Type.TEXT
108108

109109
def __repr__(self):
110-
"""Return a string representation of the message."""
111-
content = self.content if self.is_text() else "BINARY"
110+
"""Returns a string representation of the message."""
111+
content = self.content if self.is_text() else "<non-text-content>"
112112
return f"{self.role.upper()}: {content}"
113113

114114

@@ -133,7 +133,7 @@ class Conversation(pydantic.BaseModel):
133133
"""
134134

135135
def __getitem__(self, idx: int) -> Message:
136-
"""Get the message at the specified index.
136+
"""Gets the message at the specified index.
137137
138138
Args:
139139
idx (int): The index of the message to retrieve.
@@ -144,7 +144,7 @@ def __getitem__(self, idx: int) -> Message:
144144
return self.messages[idx]
145145

146146
def first_message(self, role: Optional[Role] = None) -> Optional[Message]:
147-
"""Get the first message in the conversation, optionally filtered by role.
147+
"""Gets the first message in the conversation, optionally filtered by role.
148148
149149
Args:
150150
role: The role to filter messages by.
@@ -158,7 +158,7 @@ def first_message(self, role: Optional[Role] = None) -> Optional[Message]:
158158
return messages[0] if len(messages) > 0 else None
159159

160160
def last_message(self, role: Optional[Role] = None) -> Optional[Message]:
161-
"""Get the last message in the conversation, optionally filtered by role.
161+
"""Gets the last message in the conversation, optionally filtered by role.
162162
163163
Args:
164164
role: The role to filter messages by.
@@ -172,7 +172,7 @@ def last_message(self, role: Optional[Role] = None) -> Optional[Message]:
172172
return messages[-1] if len(messages) > 0 else None
173173

174174
def filter_messages(self, role: Optional[Role] = None) -> List[Message]:
175-
"""Get all messages in the conversation, optionally filtered by role.
175+
"""Gets all messages in the conversation, optionally filtered by role.
176176
177177
Args:
178178
role: The role to filter messages by.
@@ -188,7 +188,7 @@ def filter_messages(self, role: Optional[Role] = None) -> List[Message]:
188188
return messages
189189

190190
def __repr__(self):
191-
"""Return a string representation of the conversation."""
191+
"""Returns a string representation of the conversation."""
192192
return "\n".join([repr(m) for m in self.messages])
193193

194194

0 commit comments

Comments (0)