This repository was archived by the owner on Mar 26, 2025. It is now read-only.

Commit de80578

Merge pull request #554 from openchatai/feat/debug_mode
implementing debug mode
2 parents bfcd3e2 + 8a80db9

File tree: 14 files changed, +154 −82 lines changed
Lines changed: 6 additions & 0 deletions

@@ -0,0 +1,6 @@
+class APICallFailedException(Exception):
+    """Custom exception for API call failures."""
+
+    def __init__(self, message="API call failed"):
+        self.message = message
+        super().__init__(self.message)
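
This new file is imported later in the PR as copilot_exceptions.api_call_failed_exception, so its path is presumably llm-server/copilot_exceptions/api_call_failed_exception.py. A minimal usage sketch (fetch_items and the URL are hypothetical, not code from this PR):

from copilot_exceptions.api_call_failed_exception import APICallFailedException

def fetch_items(url: str) -> str:
    # Hypothetical helper: surface a transport failure as the custom exception.
    raise APICallFailedException(f"GET {url} returned 500")

try:
    fetch_items("https://example.com/api/items")
except APICallFailedException as e:
    print(e.message)  # "GET https://example.com/api/items returned 500"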
Lines changed: 22 additions & 1 deletion

@@ -1,6 +1,27 @@
-from typing import TypedDict, Optional
+from typing import NamedTuple, TypedDict, Optional, Dict, Any
+import requests
+from dataclasses import dataclass, field


 class ResponseDict(TypedDict):
     response: Optional[str]
     error: Optional[str]
+
+
+@dataclass
+class ApiRequestResult:
+    api_requests: Dict[str, str] = field(default_factory=dict)
+
+
+class LLMResponse(NamedTuple):
+    message: Optional[str]
+    error: Optional[str]
+    api_request_response: ApiRequestResult
+
+    @classmethod
+    def create_default(cls):
+        return cls(
+            message=None,
+            error=None,
+            api_request_response=ApiRequestResult(),
+        )
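
LLMResponse replaces the loosely typed ResponseDict in the handlers below. Since NamedTuple instances are immutable, a handler can start from create_default() and fill fields with _replace; a minimal sketch, assuming only the types defined above:

from custom_types.response_dict import ApiRequestResult, LLMResponse

debug = ApiRequestResult(api_requests={"getItems": '{"status": "ok"}'})

# Build on the all-None default; _replace returns a new tuple rather than
# mutating in place.
result = LLMResponse.create_default()._replace(
    message="Here are your items.",
    api_request_response=debug,
)

assert result.error is None
print(result.api_request_response.api_requests)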
Lines changed: 31 additions & 0 deletions

@@ -0,0 +1,31 @@
+"""Add debug_json column to chat history
+
+Revision ID: 62ef7ae67c7d
+Revises: 228d50d1fc45
+Create Date: 2024-01-20 19:04:21.677356
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import mysql
+
+# revision identifiers, used by Alembic.
+revision: str = "62ef7ae67c7d"
+down_revision: Union[str, None] = "228d50d1fc45"
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade():
+    if (
+        not op.get_bind()
+        .execute(sa.text("SHOW COLUMNS FROM chat_history LIKE 'debug_json'"))
+        .fetchone()
+    ):
+        op.add_column("chat_history", sa.Column("debug_json", sa.Text(), nullable=True))
+
+
+def downgrade():
+    op.drop_column("chatbots", "global_variables")
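
Note that downgrade() as committed drops chatbots.global_variables, an unrelated column from an earlier migration, rather than reversing this upgrade(). A downgrade that actually mirrors the upgrade would presumably look like this (a sketch, not what the commit contains):

def downgrade():
    # Reverse of upgrade(): drop the column this migration added.
    op.drop_column("chat_history", "debug_json")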

llm-server/models/repository/chat_history_repo.py

Lines changed: 1 addition & 0 deletions

@@ -255,6 +255,7 @@ def create_chat_histories(
             session_id=record["session_id"],
             from_user=record["from_user"],
             message=record["message"],
+            debug_json=record.get("debug_json"),
         )

         session.add(chat_history)
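
Because the repository reads the key with record.get("debug_json"), callers that supply no debug information keep working and the column simply stays NULL. A sketch of both record shapes (values are illustrative):

records = [
    # user message: no debug payload, debug_json defaults to None
    {"session_id": "abc123", "from_user": True, "message": "list my items"},
    # bot reply: carries the serialized API request/response data
    {
        "session_id": "abc123",
        "from_user": False,
        "message": "Here are your items.",
        "debug_json": '{"api_requests": {"getItems": "..."}}',
    },
]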

llm-server/routes/chat/chat_controller.py

Lines changed: 13 additions & 16 deletions

@@ -3,7 +3,7 @@
 from typing import cast, Dict

 from flask import jsonify, Blueprint, request, Response, abort, Request
-from custom_types.response_dict import ResponseDict
+from custom_types.response_dict import LLMResponse, ResponseDict

 from models.repository.chat_history_repo import (
     get_all_chat_history_by_session_id,
@@ -23,6 +23,7 @@
 from utils.sqlalchemy_objs_to_json_array import sqlalchemy_objs_to_json_array
 from .. import root_service
 from flask_socketio import emit
+import asyncio

 logger = CustomLogger(module_name=__name__)

@@ -127,10 +128,6 @@ async def send_chat_stream(

 @chat_workflow.route("/send", methods=["POST"])
 async def send_chat():
-    response_data: ResponseDict = {
-        "error": "",
-        "response": "Something went wrong, please try again!",
-    }
     json_data = request.get_json()

     input_data = ChatInput(**json_data)
@@ -170,7 +167,8 @@ async def handle_chat_send_common(
         strategy = ToolStrategy()

         headers_from_json.update(bot.global_variables or {})
-        response_data = await strategy.handle_request(
+
+        result = await strategy.handle_request(
             message,
             session_id,
             str(base_prompt),
@@ -180,7 +178,8 @@ async def handle_chat_send_common(
             is_streaming,
         )

-        if response_data["response"]:
+        # if the llm replied correctly
+        if result.message is not None:
             chat_records = [
                 {
                     "session_id": session_id,
@@ -190,27 +189,25 @@ async def handle_chat_send_common(
                 {
                     "session_id": session_id,
                     "from_user": False,
-                    "message": response_data["response"]
-                    or response_data["error"]
-                    or "",
+                    "message": result.message,
+                    "debug_json": str(result.api_request_response.__dict__),
                 },
             ]

             upsert_analytics_record(
                 chatbot_id=str(bot.id), successful_operations=1, total_operations=1
             )
             create_chat_histories(str(bot.id), chat_records)
-        elif response_data["error"]:
+
+        elif result.error:
             upsert_analytics_record(
                 chatbot_id=str(bot.id),
                 successful_operations=0,
                 total_operations=1,
-                logs=response_data["error"],
+                logs=result.error,
             )
-
-            emit(session_id, "|im_end|") if is_streaming else None
-            return jsonify(
-                {"type": "text", "response": {"text": response_data["response"]}}
+            emit(session_id, "|im_end|") if is_streaming else jsonify(
+                {"type": "text", "response": {"text": result.message}}
             )
     except Exception as e:
         logger.error(
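
One caveat worth noting: str(result.api_request_response.__dict__) stores a Python repr (single-quoted), which json.loads cannot parse back. If debug_json is meant to hold real JSON, a JSON-safe serialization would presumably look like the sketch below (an alternative, not what this commit does):

import dataclasses
import json

from custom_types.response_dict import ApiRequestResult

debug = ApiRequestResult(api_requests={"getItems": '{"status": "ok"}'})

# What the commit stores: a Python repr with single quotes.
print(str(debug.__dict__))  # {'api_requests': {'getItems': '{"status": "ok"}'}}

# JSON-safe alternative: dataclasses.asdict + json.dumps round-trips cleanly.
print(json.dumps(dataclasses.asdict(debug)))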

llm-server/routes/chat/implementation/chain_strategy.py

Lines changed: 3 additions & 8 deletions

@@ -1,10 +1,10 @@
 from flask_socketio import emit
 from models.repository.chat_history_repo import get_chat_message_as_llm_conversation
 from routes.chat.implementation.handler_interface import ChatRequestHandler
-from typing import Callable, Dict, Optional
+from typing import Awaitable, Callable, Dict, Optional
 import asyncio

-from custom_types.response_dict import ResponseDict
+from custom_types.response_dict import LLMResponse
 from routes.flow.utils.api_retrievers import (
     get_relevant_actions,
     get_relevant_flows,
@@ -32,12 +32,7 @@ async def handle_request(
         headers: Dict[str, str],
         app: Optional[str],
         is_streaming: bool,
-    ) -> ResponseDict:
-        # Dict
-        response: ResponseDict = {
-            "error": "",
-            "response": "Something went wrong, please try again!",
-        }
+    ) -> LLMResponse:
         check_required_fields(base_prompt, text)

         tasks = [
Lines changed: 3 additions & 3 deletions

@@ -1,12 +1,12 @@
 from routes.chat.implementation.handler_interface import ChatRequestHandler
 from typing import Dict, Optional

-from custom_types.response_dict import ResponseDict
+from custom_types.response_dict import LLMResponse
 from asyncio import Future


 class FunctionStrategy(ChatRequestHandler):
-    def handle_request(
+    async def handle_request(
         self,
         text: str,
         session_id: str,
@@ -15,6 +15,6 @@ def handle_request(
         headers: Dict[str, str],
         app: Optional[str],
         is_streaming: bool,
-    ) -> Future[ResponseDict]:
+    ) -> LLMResponse:
         # Extract relevant information from inputs
         raise NotImplementedError("Subclasses must override handle_request.")

llm-server/routes/chat/implementation/handler_interface.py

Lines changed: 3 additions & 3 deletions

@@ -1,6 +1,6 @@
 from abc import ABC, abstractmethod
-from typing import Dict, Optional
-from custom_types.response_dict import ResponseDict
+from typing import Awaitable, Dict, Optional
+from custom_types.response_dict import LLMResponse
 from asyncio import Future


@@ -15,5 +15,5 @@ async def handle_request(
         headers: Dict[str, str],
         app: Optional[str],
         is_streaming: bool,
-    ) -> Future[ResponseDict]:
+    ) -> LLMResponse:
         pass
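
After this change every handler is a real coroutine returning LLMResponse directly, instead of a plain method annotated as Future[ResponseDict]. A minimal sketch of a conforming strategy (EchoStrategy is hypothetical, and the parameter list is abbreviated since the middle arguments are not shown in this diff):

from custom_types.response_dict import LLMResponse
from routes.chat.implementation.handler_interface import ChatRequestHandler

class EchoStrategy(ChatRequestHandler):
    # Hypothetical strategy: echoes the user text back as the LLM message.
    async def handle_request(self, text, session_id, *args, **kwargs) -> LLMResponse:
        return LLMResponse.create_default()._replace(message=text)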
Lines changed: 4 additions & 4 deletions

@@ -1,12 +1,12 @@
 from asyncio import Future
 from routes.chat.implementation.handler_interface import ChatRequestHandler
-from typing import Dict, Optional
+from typing import Awaitable, Dict, Optional

-from custom_types.response_dict import ResponseDict
+from custom_types.response_dict import LLMResponse


 class ToolStrategy(ChatRequestHandler):
-    def handle_request(
+    async def handle_request(
         self,
         text: str,
         session_id: str,
@@ -15,6 +15,6 @@ def handle_request(
         headers: Dict[str, str],
         app: Optional[str],
         is_streaming: bool,
-    ) -> Future[ResponseDict]:
+    ) -> LLMResponse:
         # Extract relevant information from inputs
         raise NotImplementedError("Subclasses must override handle_request.")

llm-server/routes/flow/utils/run_openapi_ops.py

Lines changed: 28 additions & 25 deletions

@@ -1,10 +1,10 @@
 import json
-from typing import Optional
+from typing import Optional, Tuple
 from flask_socketio import emit
-from openai import InvalidRequestError

 from werkzeug.datastructures import Headers
 from requests.exceptions import MissingSchema
+from copilot_exceptions.api_call_failed_exception import APICallFailedException
 from entities.flow_entity import FlowDTO
 from extractors.convert_json_to_text import (
     convert_json_error_to_text,
@@ -29,7 +29,7 @@ async def run_actions(
     bot_id: str,
     session_id: str,
     is_streaming: bool,
-) -> str:
+) -> Tuple[str, dict]:
     api_request_data = {}
     prev_api_response = ""
     apis_calls_history = {}
@@ -69,21 +69,24 @@ async def run_actions(
                     operation_id=operation_id,
                     app=app,
                 )
-                apis_calls_history[operation_id] = api_response.text
+                apis_calls_history[operation_id] = api_response.api_requests[
+                    "response"
+                ]
             else:
                 logger.info(
                     "API Response",
                     incident="log_api_response",
-                    api_response=api_response.text,
+                    api_response=api_response.api_requests,
                     json_config_used=partial_json,
                     next_action="summarize_with_partial_json",
                 )
-                api_json = json.loads(api_response.text)
+                api_json = json.loads(api_response.api_requests["response"])
                 apis_calls_history[operation_id] = json.dumps(
                     transform_response(
                         full_json=api_json, partial_json=partial_json
                     )
                 )
+
         except Exception as e:
             logger.error(
                 "Error occurred during workflow check in store",
@@ -98,23 +101,23 @@ async def run_actions(
             formatted_error = convert_json_error_to_text(
                 str(e), is_streaming, session_id
             )
-            return str(formatted_error)
+            return str(formatted_error), api_request_data
+
+    try:
+        readable_response = convert_json_to_text(
+            text,
+            apis_calls_history,
+            api_request_data,
+            bot_id=bot_id,
+            session_id=session_id,
+            is_streaming=is_streaming,
+        )

-    try:
-        return convert_json_to_text(
-            text,
-            apis_calls_history,
-            api_request_data,
-            bot_id=bot_id,
-            session_id=session_id,
-            is_streaming=is_streaming,
-        )
-    except InvalidRequestError as e:
-        error_message = (
-            f"Api response too large for the endpoint: {api_payload.endpoint}"
-            if api_payload is not None
-            else ""
-        )
-        logger.error("OpenAI exception", bot_id=bot_id, error=str(e))
-        emit(session_id, error_message) if is_streaming else None
-        return error_message
+        return readable_response, api_request_data
+    except Exception as e:
+        error_message = (
+            f"{str(e)}: {api_payload.endpoint}" if api_payload is not None else ""
+        )
+        logger.error("OpenAI exception", bot_id=bot_id, error=str(e))
+        emit(session_id, error_message) if is_streaming else None
+        return error_message, api_request_data
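
run_actions now returns the readable text together with the raw API request data on every path, so callers must unpack a 2-tuple. A self-contained sketch of the new contract, with a stub standing in for run_actions:

import asyncio
from typing import Tuple

async def run_actions_stub() -> Tuple[str, dict]:
    # Stand-in for run_actions: readable reply plus raw request/response data.
    return "Here are your items.", {"getItems": {"status": 200}}

async def caller():
    response_text, api_request_data = await run_actions_stub()
    # api_request_data is what ultimately lands in chat_history.debug_json.
    print(response_text, api_request_data)

asyncio.run(caller())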
