Skip to content

Commit

Permalink
fix: Lost content during the answering process (#2256)
Browse files Browse the repository at this point in the history
  • Loading branch information
shaohuzhang1 authored Feb 12, 2025
1 parent 2ba7a24 commit 6a5ec86
Show file tree
Hide file tree
Showing 4 changed files with 53 additions and 8 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -76,10 +76,12 @@ def event_content(response,
all_text = ''
reasoning_content = ''
try:
response_reasoning_content = False
for chunk in response:
reasoning_chunk = reasoning.get_reasoning_content(chunk)
content_chunk = reasoning_chunk.get('content')
if 'reasoning_content' in chunk.additional_kwargs:
response_reasoning_content = True
reasoning_content_chunk = chunk.additional_kwargs.get('reasoning_content', '')
else:
reasoning_content_chunk = reasoning_chunk.get('reasoning_content')
Expand All @@ -95,6 +97,21 @@ def event_content(response,
'node_type': 'ai-chat-node',
'real_node_id': 'ai-chat-node',
'reasoning_content': reasoning_content_chunk if reasoning_content_enable else ''})
reasoning_chunk = reasoning.get_end_reasoning_content()
all_text += reasoning_chunk.get('content')
reasoning_content_chunk = ""
if not response_reasoning_content:
reasoning_content_chunk = reasoning_chunk.get(
'reasoning_content')
yield manage.get_base_to_response().to_stream_chunk_response(chat_id, str(chat_record_id), 'ai-chat-node',
[], reasoning_chunk.get('content'),
False,
0, 0, {'node_is_end': False,
'view_type': 'many_view',
'node_type': 'ai-chat-node',
'real_node_id': 'ai-chat-node',
'reasoning_content'
: reasoning_content_chunk if reasoning_content_enable else ''})
# 获取token
if is_ai_chat:
try:
Expand Down Expand Up @@ -276,11 +293,13 @@ def execute_block(self, message_list: List[BaseMessage],
response_token = 0
write_context(self, manage, request_token, response_token, chat_result.content)
reasoning_result = reasoning.get_reasoning_content(chat_result)
content = reasoning_result.get('content')
reasoning_result_end = reasoning.get_end_reasoning_content()
content = reasoning_result.get('content') + reasoning_result_end.get('content')
if 'reasoning_content' in chat_result.response_metadata:
reasoning_content = chat_result.response_metadata.get('reasoning_content', '')
else:
reasoning_content = reasoning_result.get('reasoning_content')
reasoning_content = reasoning_result.get('reasoning_content') + reasoning_result_end.get(
'reasoning_content')
post_response_handler.handler(chat_id, chat_record_id, paragraph_list, problem_text,
chat_result.content, manage, self, padding_problem_text, client_id,
reasoning_content=reasoning_content if reasoning_content_enable else '')
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -55,10 +55,12 @@ def write_context_stream(node_variable: Dict, workflow_variable: Dict, node: INo
'reasoning_content_start': '<think>'})
reasoning = Reasoning(model_setting.get('reasoning_content_start', '<think>'),
model_setting.get('reasoning_content_end', '</think>'))
response_reasoning_content = False
for chunk in response:
reasoning_chunk = reasoning.get_reasoning_content(chunk)
content_chunk = reasoning_chunk.get('content')
if 'reasoning_content' in chunk.additional_kwargs:
response_reasoning_content = True
reasoning_content_chunk = chunk.additional_kwargs.get('reasoning_content', '')
else:
reasoning_content_chunk = reasoning_chunk.get('reasoning_content')
Expand All @@ -69,6 +71,16 @@ def write_context_stream(node_variable: Dict, workflow_variable: Dict, node: INo
yield {'content': content_chunk,
'reasoning_content': reasoning_content_chunk if model_setting.get('reasoning_content_enable',
False) else ''}

reasoning_chunk = reasoning.get_end_reasoning_content()
answer += reasoning_chunk.get('content')
reasoning_content_chunk = ""
if not response_reasoning_content:
reasoning_content_chunk = reasoning_chunk.get(
'reasoning_content')
yield {'content': reasoning_chunk.get('content'),
'reasoning_content': reasoning_content_chunk if model_setting.get('reasoning_content_enable',
False) else ''}
_write_context(node_variable, workflow_variable, node, workflow, answer, reasoning_content)


Expand All @@ -86,11 +98,12 @@ def write_context(node_variable: Dict, workflow_variable: Dict, node: INode, wor
'reasoning_content_start': '<think>'})
reasoning = Reasoning(model_setting.get('reasoning_content_start'), model_setting.get('reasoning_content_end'))
reasoning_result = reasoning.get_reasoning_content(response)
content = reasoning_result.get('content')
reasoning_result_end = reasoning.get_end_reasoning_content()
content = reasoning_result.get('content') + reasoning_result_end.get('content')
if 'reasoning_content' in response.response_metadata:
reasoning_content = response.response_metadata.get('reasoning_content', '')
else:
reasoning_content = reasoning_result.get('reasoning_content')
reasoning_content = reasoning_result.get('reasoning_content') + reasoning_result_end.get('reasoning_content')
_write_context(node_variable, workflow_variable, node, workflow, content, reasoning_content)


Expand Down
14 changes: 12 additions & 2 deletions apps/application/flow/tools.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,17 @@ def __init__(self, reasoning_content_start, reasoning_content_end):
self.reasoning_content_is_end = False
self.reasoning_content_chunk = ""

def get_end_reasoning_content(self):
    """Flush whatever is still buffered once the chunk stream has ended.

    Returns a dict with 'content' and 'reasoning_content' keys:
    - reasoning already closed: nothing left to flush, both fields empty;
    - reasoning opened but never closed: the buffered text is emitted as
      reasoning content (presumably the end tag never arrived — the stream
      just stopped);
    - no reasoning tags seen at all: the full accumulated text is emitted
      as plain answer content.

    Side effect: in both flush cases the chunk buffer is reset to "".
    """
    started = self.reasoning_content_is_start
    ended = self.reasoning_content_is_end

    if ended:
        # The end tag was already consumed mid-stream; nothing remains.
        return {'content': '', 'reasoning_content': ''}

    if started:
        # Unterminated reasoning section: hand back the leftover buffer.
        leftover = self.reasoning_content_chunk
        self.reasoning_content_chunk = ""
        return {'content': '', 'reasoning_content': leftover}

    # Never entered a reasoning section: everything accumulated is answer.
    self.reasoning_content_chunk = ""
    return {'content': self.all_content, 'reasoning_content': ''}

def get_reasoning_content(self, chunk):
# 如果没有开始思考过程标签那么就全是结果
if self.reasoning_content_start_tag is None or len(self.reasoning_content_start_tag) == 0:
Expand Down Expand Up @@ -60,8 +71,7 @@ def get_reasoning_content(self, chunk):
return {'content': chunk.content, 'reasoning_content': ''}
# 是否包含结束
if reasoning_content_end_tag_prefix_index > -1:
if len(
self.reasoning_content_chunk) - reasoning_content_end_tag_prefix_index > self.reasoning_content_end_tag_len:
if len(self.reasoning_content_chunk) - reasoning_content_end_tag_prefix_index >= self.reasoning_content_end_tag_len:
reasoning_content_end_tag_index = self.reasoning_content_chunk.find(self.reasoning_content_end_tag)
if reasoning_content_end_tag_index > -1:
reasoning_content_chunk = self.reasoning_content_chunk[0:reasoning_content_end_tag_index]
Expand Down
7 changes: 5 additions & 2 deletions ui/src/api/type/application.ts
Original file line number Diff line number Diff line change
Expand Up @@ -158,12 +158,15 @@ export class ChatRecordManage {
get_run_node() {
if (
this.write_node_info &&
(this.write_node_info.current_node.buffer.length > 0 ||
(this.write_node_info.current_node.reasoning_content_buffer.length > 0 ||
this.write_node_info.current_node.buffer.length > 0 ||
!this.write_node_info.current_node.is_end)
) {
return this.write_node_info
}
const run_node = this.node_list.filter((item) => item.buffer.length > 0 || !item.is_end)[0]
const run_node = this.node_list.filter(
(item) => item.reasoning_content_buffer.length > 0 || item.buffer.length > 0 || !item.is_end
)[0]

if (run_node) {
const index = this.node_list.indexOf(run_node)
Expand Down

0 comments on commit 6a5ec86

Please sign in to comment.