
Minor improvements
- improve error handling
- tweak default system message
TanGentleman committed Jan 1, 2025
1 parent a70af17 commit c45b0aa
Showing 4 changed files with 14 additions and 16 deletions.
src/open_webui_workspace/pipe_function.py (2 additions, 0 deletions)

@@ -91,6 +91,8 @@ def yield_stream_response(response: requests.Response) -> Generator:
         elif isinstance(chunk_data, dict) and 'choices' in chunk_data:
             delta = chunk_data['choices'][0].get('delta', {})
             chunk_content = delta.get('content', '')
+        else:
+            logging.critical(f"Unexpected chunk data: {chunk_data}")
 
         if chunk_content:  # Only yield non-empty content
             yield chunk_content
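
For context, a minimal sketch of how the new else branch might sit inside the streaming loop. The SSE "data:" parsing, the [DONE] sentinel, and the plain-string branch before the elif are assumptions for illustration; only the dict/else handling is taken from the diff above:

import json
import logging
from typing import Generator

import requests


def yield_stream_response(response: requests.Response) -> Generator:
    """Yield text content from a streaming chat-completions response (sketch)."""
    for line in response.iter_lines():
        if not line:
            continue
        decoded = line.decode("utf-8").removeprefix("data: ").strip()
        if decoded == "[DONE]":
            break
        try:
            chunk_data = json.loads(decoded)
        except json.JSONDecodeError:
            logging.warning(f"Skipping non-JSON chunk: {decoded!r}")
            continue

        chunk_content = ""
        if isinstance(chunk_data, str):  # assumed earlier branch, not shown in the diff
            chunk_content = chunk_data
        elif isinstance(chunk_data, dict) and 'choices' in chunk_data:
            delta = chunk_data['choices'][0].get('delta', {})
            chunk_content = delta.get('content', '')
        else:
            # Added in this commit: surface unexpected payload shapes loudly.
            logging.critical(f"Unexpected chunk data: {chunk_data}")

        if chunk_content:  # Only yield non-empty content
            yield chunk_content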
src/server/server.py (2 additions, 2 deletions)

@@ -55,7 +55,7 @@ class OutletRequestBody(TypedDict):
 class Models:
     """Model configuration constants"""
     FLASH_MODEL = "openrouter/google/gemini-flash-1.5-8b"
-    SMART_MODEL = "Llama-3.1-70B"
+    SMART_MODEL = "Llama-3.3-70B"
 
 
 # Configuration
@@ -68,7 +68,7 @@ class Models:
 
 PIPE_CONFIG = {
     # "LLM_API_BASE_URL": "http://localhost:4000/v1",
-    # "RESPONSE_MODEL": Models.FLASH_MODEL,
+    "RESPONSE_MODEL": Models.SMART_MODEL,
     # "GET_RESPONSE": True,
 }
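
To make the effect of the now-active RESPONSE_MODEL override concrete, here is a self-contained sketch of merging PIPE_CONFIG-style overrides over defaults. The DEFAULT_PIPE_CONFIG values and the resolve_pipe_config helper are hypothetical; only the Models values and the override key come from the diff:

class Models:
    """Model configuration constants (values mirrored from server.py)."""
    FLASH_MODEL = "openrouter/google/gemini-flash-1.5-8b"
    SMART_MODEL = "Llama-3.3-70B"


# Hypothetical defaults; the real default handling lives elsewhere in the repo.
DEFAULT_PIPE_CONFIG = {
    "LLM_API_BASE_URL": "http://localhost:4000/v1",
    "RESPONSE_MODEL": Models.FLASH_MODEL,
    "GET_RESPONSE": True,
}

# After this commit only RESPONSE_MODEL is overridden; commented-out keys keep their defaults.
PIPE_CONFIG = {"RESPONSE_MODEL": Models.SMART_MODEL}


def resolve_pipe_config(overrides: dict) -> dict:
    """Merge user overrides over the defaults without mutating either dict."""
    merged = dict(DEFAULT_PIPE_CONFIG)
    merged.update(overrides)
    return merged


print(resolve_pipe_config(PIPE_CONFIG)["RESPONSE_MODEL"])  # Llama-3.3-70B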
src/utils/constants.py (5 additions, 10 deletions)

@@ -1,18 +1,13 @@
 # System Messages
 TOOL_SYSTEM_MESSAGE = """You are a helpful search assistant. Use the supplied tools to search the database and assist the user. If the user requests recent results, default to the last 48 hours."""
 
-FINAL_RESPONSE_SYSTEM_MESSAGE = """You are a helpful AI assistant analyzing personal data from ScreenPipe. Your task is to:
-1. Understand the user's intent from their original query
-2. Carefully analyze the provided results (audio/OCR data)
-3. Give clear, relevant insights from the context, even if it's not directly related to the query
-The data will be provided in XML tags:
-- <user_query>: The original user question
-- <search_parameters>: The parameters used to filter the data
-- <context>: The results of the search
-Focus on making connections between the user's intent and the retrieved data to provide meaningful analysis."""
+FINAL_RESPONSE_SYSTEM_MESSAGE = """You are an AI assistant analyzing screen activity data from ScreenPipe. Your task is to:
+1. Analyze the provided data (OCR text, audio transcriptions, and metadata)
+2. Provide clear, actionable insights
+3. Answer the user's query with the retrieved data
+Input data is provided in XML tags (<user_query>, <search_parameters>, <context>). Focus on delivering practical insights that help users understand and improve their screen time patterns."""
 
 
 DEFAULT_QUERY = "Search the past 10 days for audio and screen content. Try your best to contextualize my conversations with a limit of 2 results."
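
For illustration, one way the reworded system message and the three XML tags could be assembled into a final chat request. The build_final_messages helper and the message-list shape are assumptions for the example, not code from the repo:

# Placeholder; in the repo this is the constant defined in constants.py above.
FINAL_RESPONSE_SYSTEM_MESSAGE = "You are an AI assistant analyzing screen activity data from ScreenPipe. ..."


def build_final_messages(user_query: str, search_params: dict, context: str) -> list[dict]:
    """Wrap the inputs in the XML tags the system message describes."""
    user_content = (
        f"<user_query>{user_query}</user_query>\n"
        f"<search_parameters>{search_params}</search_parameters>\n"
        f"<context>{context}</context>"
    )
    return [
        {"role": "system", "content": FINAL_RESPONSE_SYSTEM_MESSAGE},
        {"role": "user", "content": user_content},
    ]


messages = build_final_messages(
    user_query="What did I work on yesterday?",
    search_params={"content_type": "all", "limit": 2},
    context="<ocr>...</ocr>",
)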
src/utils/owui_utils/pipeline_utils.py (5 additions, 4 deletions)

@@ -265,14 +265,14 @@ def search(self, **kwargs) -> dict:
             results = response.json()
 
             return results if results.get("data") else {
-                "error": "No results found"}
+                "search_error": "No results found"}
 
         except requests.exceptions.RequestException as e:
             logging.error(f"Search request failed: {e}")
-            return {"error": f"Search failed: {str(e)}"}
+            return {"search_error": f"Search request failed."}
         except Exception as e:
             logging.error(f"Unexpected error in search: {e}")
-            return {"error": f"Unexpected error: {str(e)}"}
+            return {"search_error": f"Unexpected error in search!"}
 
     def _process_search_params(self, params: dict) -> dict:
         """Process and validate search parameters"""
@@ -397,7 +397,8 @@ def sanitize_results(results: dict,
     @staticmethod
     def catch_malformed_tool(response_text: str) -> str | dict:
         """Parse response text to extract tool call if present, otherwise return original text."""
-        TOOL_PREFIX = "<function=screenpipe_search>"
+        FUNCTION_NAME = "screenpipe_search"
+        TOOL_PREFIX = f"<function={FUNCTION_NAME}>"
 
         if not response_text.startswith(TOOL_PREFIX):
            return response_text
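
A caller-side sketch of the renamed error key: failures from search() now come back under "search_error", while the detailed exception text stays in the logs. The summarize_search helper below is hypothetical and exists only to show that contract:

def summarize_search(results: dict) -> str:
    """Turn a search() result dict into a short status line."""
    if "search_error" in results:
        # Exception details are logged server-side; callers see a generic message.
        return f"Search failed: {results['search_error']}"
    items = results.get("data", [])
    return f"Found {len(items)} result(s)."


print(summarize_search({"search_error": "No results found"}))
print(summarize_search({"data": [{"type": "OCR", "content": "..."}]}))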
