@@ -1,4 +1,5 @@
 import asyncio, websockets, os, sys, json, ssl
+from datetime import datetime
 from typing import Any
 from langchain_openai import ChatOpenAI
 from langchain.memory import ConversationBufferWindowMemory, ConversationBufferMemory
@@ -20,6 +21,7 @@
     "gpt-4": 4096,
     "gpt-4-turbo": 8192
 }
+start_time = 0
 
 # ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
 # ssl_context.load_cert_chain(certfile='leither.uk.orig.pem', keyfile='leither.uk.cert.pem')
@@ -53,6 +55,7 @@ async def on_llm_new_token(self, token: str, **kwargs: Any) -> None:
     while True:
         try:
             async for message in websocket:
+                start_time = datetime.now()
                 event = json.loads(message)
                 params = event["parameters"]
                 if params["llm"] == "openai":
@@ -64,9 +67,6 @@ async def on_llm_new_token(self, token: str, **kwargs: Any) -> None:
                 elif params["llm"] == "qianfan":
                     pass
 
-                # if params["client"] == "mobile":
-                # CHAT_LLM.streaming = False
-
                 if "rawtext" in event["input"]:
                     print(message)
                     # the request is from secretary APP. If it is too long, seperate it.
@@ -89,7 +89,7 @@ async def on_llm_new_token(self, token: str, **kwargs: Any) -> None:
                         print(chunk.content, end="|", flush=True)  # chunk size can be big
                         resp += chunk.content
                         await websocket.send(json.dumps({"type": "stream", "data": chunk.content}))
-                    print(cb)
+                    print('\n', cb)
                     sys.stdout.flush()
                     await websocket.send(json.dumps({
                         "type": "result",
@@ -115,7 +115,7 @@ async def on_llm_new_token(self, token: str, **kwargs: Any) -> None:
                     chain = ConversationChain(llm=CHAT_LLM, memory=memory, output_parser=StrOutputParser())
                     async for chunk in chain.astream(event["input"]["query"]):
                         print(chunk, end="|", flush=True)  # chunk size can be big
-                    print(cb)
+                    print('\n', cb)
                     sys.stdout.flush()
                     await websocket.send(json.dumps({
                         "type": "result",
@@ -124,14 +124,11 @@ async def on_llm_new_token(self, token: str, **kwargs: Any) -> None:
                         "cost": cb.total_cost}))
 
         except websockets.exceptions.WebSocketException as e:
-            # keep abnormal messages from logging
-            # print("Error:", type(e), e)
-            pass
-        finally:
             try:
                 await websocket.close()
-            except NameError:
-                pass
+            finally:
+                print("Websocket closed abnormally", e)
+                break
 
 async def main():
     # async with websockets.serve(handler, "", 8505, ssl=ssl_context):
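Side note on the new start_time: the hunks above set start_time = datetime.now() when a message arrives, but none of the displayed lines read it, so the consumer presumably sits outside these hunks. Because the assignment happens inside the connection-handler coroutine, it also creates a local name rather than updating the module-level start_time = 0 unless global start_time is declared. A minimal, hypothetical sketch of how the captured value could be used to report per-request latency (the elapsed variable and the log line are illustrative assumptions, not part of this commit):

    # hypothetical sketch, not part of this commit:
    # measure wall-clock time since the message arrived (start_time set in the loop above)
    elapsed = (datetime.now() - start_time).total_seconds()
    print(f"request handled in {elapsed:.2f}s")
    sys.stdout.flush()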