Skip to content

Commit

Permalink
Merge branch 'langgenius:main' into main
Browse files Browse the repository at this point in the history
  • Loading branch information
leslie2046 authored Apr 17, 2024
2 parents 2731299 + e212a87 commit b513209
Show file tree
Hide file tree
Showing 35 changed files with 788 additions and 25 deletions.
29 changes: 29 additions & 0 deletions api/core/model_runtime/entities/message_entities.py
Original file line number Diff line number Diff line change
Expand Up @@ -88,6 +88,14 @@ class PromptMessage(ABC, BaseModel):
content: Optional[str | list[PromptMessageContent]] = None
name: Optional[str] = None

def is_empty(self) -> bool:
    """
    Check if prompt message is empty.

    A message counts as empty when its ``content`` is falsy:
    ``None``, an empty string, or an empty content-item list.

    :return: True if prompt message is empty, False otherwise
    """
    if self.content:
        return False
    return True


class UserPromptMessage(PromptMessage):
"""
Expand Down Expand Up @@ -118,6 +126,16 @@ class ToolCallFunction(BaseModel):
role: PromptMessageRole = PromptMessageRole.ASSISTANT
tool_calls: list[ToolCall] = []

def is_empty(self) -> bool:
    """
    Check if prompt message is empty.

    An assistant message is empty only when it has neither text content
    (``super().is_empty()``) nor any pending tool calls.

    :return: True if prompt message is empty, False otherwise
    """
    # BUG FIX: the previous logic returned True (empty) whenever
    # tool_calls was non-empty — even when content was present — because
    # the combined condition only returned False for "has content AND no
    # tool calls". A message carrying tool calls is never empty.
    return super().is_empty() and not self.tool_calls

class SystemPromptMessage(PromptMessage):
"""
Expand All @@ -132,3 +150,14 @@ class ToolPromptMessage(PromptMessage):
"""
role: PromptMessageRole = PromptMessageRole.TOOL
tool_call_id: str

def is_empty(self) -> bool:
    """
    Check if prompt message is empty.

    A tool message is empty only when it has neither text content
    (``super().is_empty()``) nor an associated ``tool_call_id``.

    :return: True if prompt message is empty, False otherwise
    """
    # BUG FIX: the previous logic returned True (empty) whenever
    # tool_call_id was set — even when content was present — because the
    # combined condition only returned False for "has content AND no
    # tool_call_id". A message referencing a tool call is never empty.
    return super().is_empty() and not self.tool_call_id
4 changes: 2 additions & 2 deletions api/core/model_runtime/model_providers/nvidia/llm/llm.py
Original file line number Diff line number Diff line change
Expand Up @@ -131,7 +131,7 @@ def _validate_credentials(self, model: str, credentials: dict) -> None:
endpoint_url,
headers=headers,
json=data,
timeout=(10, 60)
timeout=(10, 300)
)

if response.status_code != 200:
Expand Down Expand Up @@ -232,7 +232,7 @@ def _generate(self, model: str, credentials: dict, prompt_messages: list[PromptM
endpoint_url,
headers=headers,
json=data,
timeout=(10, 60),
timeout=(10, 300),
stream=stream
)

Expand Down
2 changes: 1 addition & 1 deletion api/core/model_runtime/model_providers/ollama/llm/llm.py
Original file line number Diff line number Diff line change
Expand Up @@ -201,7 +201,7 @@ def _generate(self, model: str, credentials: dict,
endpoint_url,
headers=headers,
json=data,
timeout=(10, 60),
timeout=(10, 300),
stream=stream
)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -138,7 +138,7 @@ def validate_credentials(self, model: str, credentials: dict) -> None:
endpoint_url,
headers=headers,
json=data,
timeout=(10, 60)
timeout=(10, 300)
)

if response.status_code != 200:
Expand Down Expand Up @@ -334,7 +334,7 @@ def _generate(self, model: str, credentials: dict, prompt_messages: list[PromptM
endpoint_url,
headers=headers,
json=data,
timeout=(10, 60),
timeout=(10, 300),
stream=stream
)

Expand Down
2 changes: 1 addition & 1 deletion api/core/tools/provider/builtin/jina/tools/jina_reader.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ def _invoke(self,
url = tool_parameters['url']

headers = {
'Accept': 'text/event-stream'
'Accept': 'application/json'
}

response = ssrf_proxy.get(
Expand Down
12 changes: 11 additions & 1 deletion api/core/workflow/nodes/llm/llm_node.py
Original file line number Diff line number Diff line change
Expand Up @@ -438,7 +438,11 @@ def _fetch_prompt_messages(self, node_data: LLMNodeData,
stop = model_config.stop

vision_enabled = node_data.vision.enabled
filtered_prompt_messages = []
for prompt_message in prompt_messages:
if prompt_message.is_empty():
continue

if not isinstance(prompt_message.content, str):
prompt_message_content = []
for content_item in prompt_message.content:
Expand All @@ -453,7 +457,13 @@ def _fetch_prompt_messages(self, node_data: LLMNodeData,
and prompt_message_content[0].type == PromptMessageContentType.TEXT):
prompt_message.content = prompt_message_content[0].data

return prompt_messages, stop
filtered_prompt_messages.append(prompt_message)

if not filtered_prompt_messages:
raise ValueError("No prompt found in the LLM configuration. "
"Please ensure a prompt is properly configured before proceeding.")

return filtered_prompt_messages, stop

@classmethod
def deduct_llm_quota(cls, tenant_id: str, model_instance: ModelInstance, usage: LLMUsage) -> None:
Expand Down
9 changes: 6 additions & 3 deletions web/app/components/app/chat/log/index.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -11,8 +11,9 @@ const Log: FC<LogProps> = ({
logItem,
}) => {
const { t } = useTranslation()
const { setCurrentLogItem, setShowPromptLogModal, setShowMessageLogModal } = useAppStore()
const { workflow_run_id: runID } = logItem
const { setCurrentLogItem, setShowPromptLogModal, setShowAgentLogModal, setShowMessageLogModal } = useAppStore()
const { workflow_run_id: runID, agent_thoughts } = logItem
const isAgent = agent_thoughts && agent_thoughts.length > 0

return (
<div
Expand All @@ -23,12 +24,14 @@ const Log: FC<LogProps> = ({
setCurrentLogItem(logItem)
if (runID)
setShowMessageLogModal(true)
else if (isAgent)
setShowAgentLogModal(true)
else
setShowPromptLogModal(true)
}}
>
<File02 className='mr-1 w-4 h-4' />
<div className='text-xs leading-4'>{runID ? t('appLog.viewLog') : t('appLog.promptLog')}</div>
<div className='text-xs leading-4'>{runID ? t('appLog.viewLog') : isAgent ? t('appLog.agentLog') : t('appLog.promptLog')}</div>
</div>
)
}
Expand Down
3 changes: 3 additions & 0 deletions web/app/components/app/chat/type.ts
Original file line number Diff line number Diff line change
Expand Up @@ -83,6 +83,9 @@ export type IChatItem = {
agent_thoughts?: ThoughtItem[]
message_files?: VisionFile[]
workflow_run_id?: string
// for agent log
conversationId?: string
input?: any
}

export type MessageEnd = {
Expand Down
2 changes: 1 addition & 1 deletion web/app/components/app/configuration/debug/index.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -473,7 +473,7 @@ const Debug: FC<IDebug> = ({
)}
</div>
)}
{showPromptLogModal && (
{mode === AppType.completion && showPromptLogModal && (
<PromptLogModal
width={width}
currentLogItem={currentLogItem}
Expand Down
6 changes: 3 additions & 3 deletions web/app/components/app/configuration/index.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,7 @@ import { fetchCollectionList } from '@/service/tools'
import { type Collection } from '@/app/components/tools/types'
import { useStore as useAppStore } from '@/app/components/app/store'

type PublichConfig = {
type PublishConfig = {
modelConfig: ModelConfig
completionParams: FormValue
}
Expand All @@ -74,7 +74,7 @@ const Configuration: FC = () => {
const matched = pathname.match(/\/app\/([^/]+)/)
const appId = (matched?.length && matched[1]) ? matched[1] : ''
const [mode, setMode] = useState('')
const [publishedConfig, setPublishedConfig] = useState<PublichConfig | null>(null)
const [publishedConfig, setPublishedConfig] = useState<PublishConfig | null>(null)

const modalConfig = useMemo(() => appDetail?.model_config || {} as BackendModelConfig, [appDetail])
const [conversationId, setConversationId] = useState<string | null>('')
Expand Down Expand Up @@ -225,7 +225,7 @@ const Configuration: FC = () => {

const [isShowHistoryModal, { setTrue: showHistoryModal, setFalse: hideHistoryModal }] = useBoolean(false)

const syncToPublishedConfig = (_publishedConfig: PublichConfig) => {
const syncToPublishedConfig = (_publishedConfig: PublishConfig) => {
const modelConfig = _publishedConfig.modelConfig
setModelConfig(_publishedConfig.modelConfig)
setCompletionParams(_publishedConfig.completionParams)
Expand Down
24 changes: 20 additions & 4 deletions web/app/components/app/log/list.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@ import ModelName from '@/app/components/header/account-setting/model-provider-pa
import useBreakpoints, { MediaType } from '@/hooks/use-breakpoints'
import TextGeneration from '@/app/components/app/text-generate/item'
import { addFileInfos, sortAgentSorts } from '@/app/components/tools/utils'
import AgentLogModal from '@/app/components/base/agent-log-modal'
import PromptLogModal from '@/app/components/base/prompt-log-modal'
import MessageLogModal from '@/app/components/base/message-log-modal'
import { useStore as useAppStore } from '@/app/components/app/store'
Expand Down Expand Up @@ -76,7 +77,7 @@ const PARAM_MAP = {
}

// Format interface data for easy display
const getFormattedChatList = (messages: ChatMessage[]) => {
const getFormattedChatList = (messages: ChatMessage[], conversationId: string) => {
const newChatList: IChatItem[] = []
messages.forEach((item: ChatMessage) => {
newChatList.push({
Expand Down Expand Up @@ -107,6 +108,11 @@ const getFormattedChatList = (messages: ChatMessage[]) => {
: []),
],
workflow_run_id: item.workflow_run_id,
conversationId,
input: {
inputs: item.inputs,
query: item.query,
},
more: {
time: dayjs.unix(item.created_at).format('hh:mm A'),
tokens: item.answer_tokens + item.message_tokens,
Expand Down Expand Up @@ -148,7 +154,7 @@ type IDetailPanel<T> = {

function DetailPanel<T extends ChatConversationFullDetailResponse | CompletionConversationFullDetailResponse>({ detail, onFeedback }: IDetailPanel<T>) {
const { onClose, appDetail } = useContext(DrawerContext)
const { currentLogItem, setCurrentLogItem, showPromptLogModal, setShowPromptLogModal, showMessageLogModal, setShowMessageLogModal } = useAppStore()
const { currentLogItem, setCurrentLogItem, showPromptLogModal, setShowPromptLogModal, showAgentLogModal, setShowAgentLogModal, showMessageLogModal, setShowMessageLogModal } = useAppStore()
const { t } = useTranslation()
const [items, setItems] = React.useState<IChatItem[]>([])
const [hasMore, setHasMore] = useState(true)
Expand All @@ -172,7 +178,7 @@ function DetailPanel<T extends ChatConversationFullDetailResponse | CompletionCo
const varValues = messageRes.data[0].inputs
setVarValues(varValues)
}
const newItems = [...getFormattedChatList(messageRes.data), ...items]
const newItems = [...getFormattedChatList(messageRes.data, detail.id), ...items]
if (messageRes.has_more === false && detail?.model_config?.configs?.introduction) {
newItems.unshift({
id: 'introduction',
Expand Down Expand Up @@ -401,6 +407,16 @@ function DetailPanel<T extends ChatConversationFullDetailResponse | CompletionCo
}}
/>
)}
{showAgentLogModal && (
<AgentLogModal
width={width}
currentLogItem={currentLogItem}
onCancel={() => {
setCurrentLogItem()
setShowAgentLogModal(false)
}}
/>
)}
{showMessageLogModal && (
<MessageLogModal
width={width}
Expand Down Expand Up @@ -607,7 +623,7 @@ const ConversationList: FC<IConversationList> = ({ logs, appDetail, onRefresh })
onClose={onCloseDrawer}
mask={isMobile}
footer={null}
panelClassname='mt-16 mx-2 sm:mr-2 mb-3 !p-0 !max-w-[640px] rounded-xl'
panelClassname='mt-16 mx-2 sm:mr-2 mb-4 !p-0 !max-w-[640px] rounded-xl'
>
<DrawerContext.Provider value={{
onClose: onCloseDrawer,
Expand Down
4 changes: 4 additions & 0 deletions web/app/components/app/store.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ type State = {
appSidebarExpand: string
currentLogItem?: IChatItem
showPromptLogModal: boolean
showAgentLogModal: boolean
showMessageLogModal: boolean
}

Expand All @@ -15,6 +16,7 @@ type Action = {
setAppSiderbarExpand: (state: string) => void
setCurrentLogItem: (item?: IChatItem) => void
setShowPromptLogModal: (showPromptLogModal: boolean) => void
setShowAgentLogModal: (showAgentLogModal: boolean) => void
setShowMessageLogModal: (showMessageLogModal: boolean) => void
}

Expand All @@ -27,6 +29,8 @@ export const useStore = create<State & Action>(set => ({
setCurrentLogItem: currentLogItem => set(() => ({ currentLogItem })),
showPromptLogModal: false,
setShowPromptLogModal: showPromptLogModal => set(() => ({ showPromptLogModal })),
showAgentLogModal: false,
setShowAgentLogModal: showAgentLogModal => set(() => ({ showAgentLogModal })),
showMessageLogModal: false,
setShowMessageLogModal: showMessageLogModal => set(() => ({ showMessageLogModal })),
}))
Loading

0 comments on commit b513209

Please sign in to comment.