diff --git a/.gitignore b/.gitignore
index 1c2147a8..71e0ef8e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -38,5 +38,5 @@ botpy.log*
 /poc
 /libs/wecom_api/test.py
 /venv
-/jp-tyo-churros-05.rockchin.top
-test.py
\ No newline at end of file
+test.py
+/web_ui
\ No newline at end of file
diff --git a/pkg/provider/runners/difysvapi.py b/pkg/provider/runners/difysvapi.py
index a33a3c0b..ef35e829 100644
--- a/pkg/provider/runners/difysvapi.py
+++ b/pkg/provider/runners/difysvapi.py
@@ -164,12 +164,14 @@ class DifyServiceAPIRunner(runner.RequestRunner):
                 for image_id in image_ids
             ]
 
-        ignored_events = ["agent_message"]
+        ignored_events = []
 
         inputs = {}
 
         inputs.update(query.variables)
 
+        pending_agent_message = ''
+
         async for chunk in self.dify_client.chat_messages(
             inputs=inputs,
             query=plain_text,
@@ -183,50 +185,55 @@ class DifyServiceAPIRunner(runner.RequestRunner):
             if chunk["event"] in ignored_events:
                 continue
 
-            if chunk["event"] == "agent_thought":
-
-                if chunk['tool'] != '' and chunk['observation'] != '':  # tool call result, skip it
-                    continue
-
-                if chunk['thought'].strip() != '':  # text reply content
-                    msg = llm_entities.Message(
-                        role="assistant",
-                        content=chunk["thought"],
-                    )
-                    yield msg
-
-                if chunk['tool']:
-                    msg = llm_entities.Message(
-                        role="assistant",
-                        tool_calls=[
-                            llm_entities.ToolCall(
-                                id=chunk['id'],
-                                type="function",
-                                function=llm_entities.FunctionCall(
-                                    name=chunk["tool"],
-                                    arguments=json.dumps({}),
-                                ),
-                            )
-                        ],
-                    )
-                    yield msg
-            if chunk['event'] == 'message_file':
-
-                if chunk['type'] == 'image' and chunk['belongs_to'] == 'assistant':
-
-                    base_url = self.dify_client.base_url
-
-                    if base_url.endswith('/v1'):
-                        base_url = base_url[:-3]
-
-                    image_url = base_url + chunk['url']
+            if chunk['event'] == 'agent_message':
+                pending_agent_message += chunk['answer']
+            else:
+                if pending_agent_message.strip() != '':
+                    pending_agent_message = pending_agent_message.replace('Action:', '')
                     yield llm_entities.Message(
                         role="assistant",
-                        content=[llm_entities.ContentElement.from_image_url(image_url)],
+                        content=self._try_convert_thinking(pending_agent_message),
                     )
-            if chunk['event'] == 'error':
-                raise errors.DifyAPIError("dify 服务错误: " + chunk['message'])
+                    pending_agent_message = ''
+
+                if chunk["event"] == "agent_thought":
+
+                    if chunk['tool'] != '' and chunk['observation'] != '':  # tool call result, skip it
+                        continue
+
+                    if chunk['tool']:
+                        msg = llm_entities.Message(
+                            role="assistant",
+                            tool_calls=[
+                                llm_entities.ToolCall(
+                                    id=chunk['id'],
+                                    type="function",
+                                    function=llm_entities.FunctionCall(
+                                        name=chunk["tool"],
+                                        arguments=json.dumps({}),
+                                    ),
+                                )
+                            ],
+                        )
+                        yield msg
+                if chunk['event'] == 'message_file':
+
+                    if chunk['type'] == 'image' and chunk['belongs_to'] == 'assistant':
+
+                        base_url = self.dify_client.base_url
+
+                        if base_url.endswith('/v1'):
+                            base_url = base_url[:-3]
+
+                        image_url = base_url + chunk['url']
+
+                        yield llm_entities.Message(
+                            role="assistant",
+                            content=[llm_entities.ContentElement.from_image_url(image_url)],
+                        )
+                if chunk['event'] == 'error':
+                    raise errors.DifyAPIError("dify 服务错误: " + chunk['message'])
 
             query.session.using_conversation.uuid = chunk["conversation_id"]
@@ -303,11 +310,11 @@ class DifyServiceAPIRunner(runner.RequestRunner):
 
                 msg = llm_entities.Message(
                     role="assistant",
-                    content=chunk["data"]["outputs"][
+                    content=self._try_convert_thinking(chunk["data"]["outputs"][
                         self.ap.provider_cfg.data["dify-service-api"]["workflow"][
                             "output-key"
                         ]
-                    ],
+                    ]),
                 )
                 yield msg
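
For reviewers: the core change in the second hunk is that agent_message events are no longer ignored; their answer fragments are accumulated in pending_agent_message and flushed as a single assistant message as soon as any other event type arrives (after stripping "Action:" and passing the text through _try_convert_thinking, which is defined outside this diff). Below is a minimal standalone sketch of that buffering pattern; the simplified chunk dicts and the plain-string return value are stand-ins for the real Dify stream and llm_entities objects, not the actual runner API.

# Standalone sketch of the buffering introduced above: consecutive
# 'agent_message' chunks are concatenated and emitted as one reply once a
# different event type shows up. The chunk dicts below are simplified
# stand-ins for the Dify stream; the real runner additionally wraps the
# flushed text with self._try_convert_thinking() before yielding it.

from typing import Iterable, List


def coalesce_agent_messages(chunks: Iterable[dict]) -> List[str]:
    """Collect streamed 'agent_message' fragments into whole replies."""
    replies: List[str] = []
    pending = ''

    for chunk in chunks:
        if chunk['event'] == 'agent_message':
            # Fragment of the streamed answer; keep buffering.
            pending += chunk.get('answer', '')
        else:
            # Any other event (agent_thought, message_file, message_end, ...)
            # ends the current reply, so flush the buffer first; this mirrors
            # the diff, which only flushes inside the else branch.
            if pending.strip():
                replies.append(pending.replace('Action:', ''))
                pending = ''
            # ... per-event handling would go here ...

    return replies


if __name__ == '__main__':
    stream = [
        {'event': 'agent_message', 'answer': 'Hello, '},
        {'event': 'agent_message', 'answer': 'world.'},
        {'event': 'message_end'},
    ]
    print(coalesce_agent_messages(stream))  # ['Hello, world.']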