2024-12-14 17:51:11 +08:00
|
|
|
|
from __future__ import annotations
|
|
|
|
|
|
|
|
|
|
|
|
import typing
|
|
|
|
|
|
import json
|
|
|
|
|
|
import uuid
|
2025-02-24 12:17:33 +08:00
|
|
|
|
import re
|
2024-12-24 11:26:33 +08:00
|
|
|
|
import base64
|
2024-12-14 17:51:11 +08:00
|
|
|
|
|
2025-01-06 21:28:36 +08:00
|
|
|
|
|
2024-12-14 17:51:11 +08:00
|
|
|
|
from .. import runner
|
2025-03-29 17:50:45 +08:00
|
|
|
|
from ...core import app, entities as core_entities
|
2024-12-14 17:51:11 +08:00
|
|
|
|
from .. import entities as llm_entities
|
|
|
|
|
|
from ...utils import image
|
|
|
|
|
|
|
|
|
|
|
|
from libs.dify_service_api.v1 import client, errors
|
|
|
|
|
|
|
|
|
|
|
|
|
2025-04-29 17:24:07 +08:00
|
|
|
|
@runner.runner_class('dify-service-api')
class DifyServiceAPIRunner(runner.RequestRunner):
    """Dify Service API conversation request runner.

    Dispatches pipeline queries to a Dify app (chat / agent / workflow,
    per pipeline config) via the async Dify Service API client.
    """

    # Async HTTP client for the Dify Service API; constructed in __init__
    # from the pipeline's api-key and base-url.
    dify_client: client.AsyncDifyServiceClient
|
|
|
|
|
|
|
2025-03-29 17:50:45 +08:00
|
|
|
|
def __init__(self, ap: app.Application, pipeline_config: dict):
|
|
|
|
|
|
self.ap = ap
|
|
|
|
|
|
self.pipeline_config = pipeline_config
|
|
|
|
|
|
|
2025-04-29 17:24:07 +08:00
|
|
|
|
valid_app_types = ['chat', 'agent', 'workflow']
|
2025-05-10 18:04:58 +08:00
|
|
|
|
if self.pipeline_config['ai']['dify-service-api']['app-type'] not in valid_app_types:
|
2024-12-16 23:54:56 +08:00
|
|
|
|
raise errors.DifyAPIError(
|
2025-04-29 17:24:07 +08:00
|
|
|
|
f'不支持的 Dify 应用类型: {self.pipeline_config["ai"]["dify-service-api"]["app-type"]}'
|
2024-12-16 23:54:56 +08:00
|
|
|
|
)
|
|
|
|
|
|
|
2025-04-29 17:24:07 +08:00
|
|
|
|
api_key = self.pipeline_config['ai']['dify-service-api']['api-key']
|
2024-12-14 17:51:11 +08:00
|
|
|
|
|
|
|
|
|
|
self.dify_client = client.AsyncDifyServiceClient(
|
|
|
|
|
|
api_key=api_key,
|
2025-04-29 17:24:07 +08:00
|
|
|
|
base_url=self.pipeline_config['ai']['dify-service-api']['base-url'],
|
2024-12-14 17:51:11 +08:00
|
|
|
|
)
|
|
|
|
|
|
|
2025-02-24 12:17:33 +08:00
|
|
|
|
def _try_convert_thinking(self, resp_text: str) -> str:
|
|
|
|
|
|
"""尝试转换 Dify 的思考提示"""
|
2025-04-29 17:24:07 +08:00
|
|
|
|
if not resp_text.startswith(
|
|
|
|
|
|
'<details style="color:gray;background-color: #f8f8f8;padding: 8px;border-radius: 4px;" open> <summary> Thinking... </summary>'
|
|
|
|
|
|
):
|
2025-02-24 12:17:33 +08:00
|
|
|
|
return resp_text
|
|
|
|
|
|
|
2025-05-10 18:04:58 +08:00
|
|
|
|
if self.pipeline_config['ai']['dify-service-api']['thinking-convert'] == 'original':
|
2025-02-24 12:17:33 +08:00
|
|
|
|
return resp_text
|
2025-04-29 17:24:07 +08:00
|
|
|
|
|
2025-05-10 18:04:58 +08:00
|
|
|
|
if self.pipeline_config['ai']['dify-service-api']['thinking-convert'] == 'remove':
|
2025-04-29 17:24:07 +08:00
|
|
|
|
return re.sub(
|
|
|
|
|
|
r'<details style="color:gray;background-color: #f8f8f8;padding: 8px;border-radius: 4px;" open> <summary> Thinking... </summary>.*?</details>',
|
|
|
|
|
|
'',
|
|
|
|
|
|
resp_text,
|
|
|
|
|
|
flags=re.DOTALL,
|
|
|
|
|
|
)
|
|
|
|
|
|
|
2025-05-10 18:04:58 +08:00
|
|
|
|
if self.pipeline_config['ai']['dify-service-api']['thinking-convert'] == 'plain':
|
2025-02-24 12:17:33 +08:00
|
|
|
|
pattern = r'<details style="color:gray;background-color: #f8f8f8;padding: 8px;border-radius: 4px;" open> <summary> Thinking... </summary>(.*?)</details>'
|
|
|
|
|
|
thinking_text = re.search(pattern, resp_text, flags=re.DOTALL)
|
|
|
|
|
|
content_text = re.sub(pattern, '', resp_text, flags=re.DOTALL)
|
2025-04-29 17:24:07 +08:00
|
|
|
|
return f'<think>{thinking_text.group(1)}</think>\n{content_text}'
|
2025-02-24 12:17:33 +08:00
|
|
|
|
|
2025-05-10 18:04:58 +08:00
|
|
|
|
async def _preprocess_user_message(self, query: core_entities.Query) -> tuple[str, list[str]]:
|
2024-12-14 17:51:11 +08:00
|
|
|
|
"""预处理用户消息,提取纯文本,并将图片上传到 Dify 服务
|
2024-12-16 23:54:56 +08:00
|
|
|
|
|
2024-12-14 17:51:11 +08:00
|
|
|
|
Returns:
|
|
|
|
|
|
tuple[str, list[str]]: 纯文本和图片的 Dify 服务图片 ID
|
|
|
|
|
|
"""
|
2025-04-29 17:24:07 +08:00
|
|
|
|
plain_text = ''
|
2024-12-14 17:51:11 +08:00
|
|
|
|
image_ids = []
|
2025-03-02 18:49:32 +08:00
|
|
|
|
|
2024-12-14 17:51:11 +08:00
|
|
|
|
if isinstance(query.user_message.content, list):
|
|
|
|
|
|
for ce in query.user_message.content:
|
2025-04-29 17:24:07 +08:00
|
|
|
|
if ce.type == 'text':
|
2024-12-14 17:51:11 +08:00
|
|
|
|
plain_text += ce.text
|
2025-04-29 17:24:07 +08:00
|
|
|
|
elif ce.type == 'image_base64':
|
2025-05-10 18:04:58 +08:00
|
|
|
|
image_b64, image_format = await image.extract_b64_and_format(ce.image_base64)
|
2024-12-24 11:26:33 +08:00
|
|
|
|
file_bytes = base64.b64decode(image_b64)
|
2025-04-29 17:24:07 +08:00
|
|
|
|
file = ('img.png', file_bytes, f'image/{image_format}')
|
2024-12-16 23:54:56 +08:00
|
|
|
|
file_upload_resp = await self.dify_client.upload_file(
|
|
|
|
|
|
file,
|
2025-04-29 17:24:07 +08:00
|
|
|
|
f'{query.session.launcher_type.value}_{query.session.launcher_id}',
|
2024-12-16 23:54:56 +08:00
|
|
|
|
)
|
2025-04-29 17:24:07 +08:00
|
|
|
|
image_id = file_upload_resp['id']
|
2024-12-14 17:51:11 +08:00
|
|
|
|
image_ids.append(image_id)
|
|
|
|
|
|
elif isinstance(query.user_message.content, str):
|
|
|
|
|
|
plain_text = query.user_message.content
|
|
|
|
|
|
|
2025-03-02 19:10:09 +08:00
|
|
|
|
return plain_text, image_ids
|
2024-12-14 17:51:11 +08:00
|
|
|
|
|
2025-05-10 18:04:58 +08:00
|
|
|
|
    async def _chat_messages(self, query: core_entities.Query) -> typing.AsyncGenerator[llm_entities.Message, None]:
        """Call a Dify chat-assistant app and yield assistant messages.

        Streams chunks from the Dify chat-messages endpoint. The app can be
        basic-orchestrated (plain `message` events) or workflow-orchestrated
        (signalled by a `workflow_started` event); messages are assembled
        accordingly.

        Args:
            query: The current pipeline query.

        Yields:
            llm_entities.Message: Assistant messages built from the stream.

        Raises:
            errors.DifyAPIError: If the API yields no chunks at all.
        """
        # Empty string means "start a new conversation" on the Dify side.
        cov_id = query.session.using_conversation.uuid or ''
        # Expose the conversation id to pipeline variables as well.
        query.variables['conversation_id'] = cov_id

        plain_text, image_ids = await self._preprocess_user_message(query)

        # Reference previously-uploaded images by their Dify file IDs.
        files = [
            {
                'type': 'image',
                'transfer_method': 'local_file',
                'upload_file_id': image_id,
            }
            for image_id in image_ids
        ]

        mode = 'basic'  # marks basic orchestration vs workflow orchestration

        # Accumulates `message` event text until a `message_end` arrives.
        basic_mode_pending_chunk = ''

        inputs = {}

        inputs.update(query.variables)

        chunk = None  # initialized so the post-loop "no response" check works

        async for chunk in self.dify_client.chat_messages(
            inputs=inputs,
            query=plain_text,
            user=f'{query.session.launcher_type.value}_{query.session.launcher_id}',
            conversation_id=cov_id,
            files=files,
            timeout=120,
        ):
            self.ap.logger.debug('dify-chat-chunk: ' + str(chunk))

            # First workflow event flips the mode for the rest of the stream.
            if chunk['event'] == 'workflow_started':
                mode = 'workflow'

            if mode == 'workflow':
                # Only the finished 'answer' node carries user-facing output.
                if chunk['event'] == 'node_finished':
                    if chunk['data']['node_type'] == 'answer':
                        yield llm_entities.Message(
                            role='assistant',
                            content=self._try_convert_thinking(chunk['data']['outputs']['answer']),
                        )
            elif mode == 'basic':
                if chunk['event'] == 'message':
                    basic_mode_pending_chunk += chunk['answer']
                elif chunk['event'] == 'message_end':
                    # Flush the accumulated answer as one assistant message.
                    yield llm_entities.Message(
                        role='assistant',
                        content=self._try_convert_thinking(basic_mode_pending_chunk),
                    )
                    basic_mode_pending_chunk = ''

        if chunk is None:
            raise errors.DifyAPIError('Dify API 没有返回任何响应,请检查网络连接和API配置')

        # Persist the conversation id so follow-up turns reuse it.
        query.session.using_conversation.uuid = chunk['conversation_id']
|
2024-12-17 00:41:28 +08:00
|
|
|
|
|
|
|
|
|
|
    async def _agent_chat_messages(
        self, query: core_entities.Query
    ) -> typing.AsyncGenerator[llm_entities.Message, None]:
        """Call a Dify agent app and yield assistant/tool-call messages.

        Streams chunks from the chat-messages endpoint in streaming mode.
        `agent_message` text is accumulated and flushed as an assistant
        message whenever a non-message event arrives; `agent_thought` events
        become tool-call messages, `message_file` events become image
        messages.

        Args:
            query: The current pipeline query.

        Yields:
            llm_entities.Message: Assistant text, tool-call, or image messages.

        Raises:
            errors.DifyAPIError: On a Dify `error` event, or if the API
                yields no chunks at all.
        """
        # Empty string means "start a new conversation" on the Dify side.
        cov_id = query.session.using_conversation.uuid or ''
        query.variables['conversation_id'] = cov_id

        plain_text, image_ids = await self._preprocess_user_message(query)

        # Reference previously-uploaded images by their Dify file IDs.
        files = [
            {
                'type': 'image',
                'transfer_method': 'local_file',
                'upload_file_id': image_id,
            }
            for image_id in image_ids
        ]

        # Event names to skip entirely (none at present; kept for parity
        # with _workflow_messages).
        ignored_events = []

        inputs = {}

        inputs.update(query.variables)

        # Accumulates streamed `agent_message` text until flushed.
        pending_agent_message = ''

        chunk = None  # initialized so the post-loop "no response" check works

        async for chunk in self.dify_client.chat_messages(
            inputs=inputs,
            query=plain_text,
            user=f'{query.session.launcher_type.value}_{query.session.launcher_id}',
            response_mode='streaming',
            conversation_id=cov_id,
            files=files,
            timeout=120,
        ):
            self.ap.logger.debug('dify-agent-chunk: ' + str(chunk))

            if chunk['event'] in ignored_events:
                continue

            if chunk['event'] == 'agent_message':
                pending_agent_message += chunk['answer']
            else:
                # Any non-message event first flushes the pending text.
                if pending_agent_message.strip() != '':
                    # Strip the stray "Action:" marker Dify appends after a
                    # thinking block.
                    pending_agent_message = pending_agent_message.replace('</details>Action:', '</details>')
                    yield llm_entities.Message(
                        role='assistant',
                        content=self._try_convert_thinking(pending_agent_message),
                    )
                pending_agent_message = ''

                if chunk['event'] == 'agent_thought':
                    if chunk['tool'] != '' and chunk['observation'] != '':  # tool call result, skip
                        continue

                    # A thought naming a tool (without an observation yet)
                    # is surfaced as a tool-call message.
                    if chunk['tool']:
                        msg = llm_entities.Message(
                            role='assistant',
                            tool_calls=[
                                llm_entities.ToolCall(
                                    id=chunk['id'],
                                    type='function',
                                    function=llm_entities.FunctionCall(
                                        name=chunk['tool'],
                                        arguments=json.dumps({}),
                                    ),
                                )
                            ],
                        )
                        yield msg
                if chunk['event'] == 'message_file':
                    if chunk['type'] == 'image' and chunk['belongs_to'] == 'assistant':
                        base_url = self.dify_client.base_url

                        # File URLs are served from the host root, not /v1.
                        if base_url.endswith('/v1'):
                            base_url = base_url[:-3]

                        image_url = base_url + chunk['url']

                        yield llm_entities.Message(
                            role='assistant',
                            content=[llm_entities.ContentElement.from_image_url(image_url)],
                        )
                if chunk['event'] == 'error':
                    raise errors.DifyAPIError('dify 服务错误: ' + chunk['message'])

        if chunk is None:
            raise errors.DifyAPIError('Dify API 没有返回任何响应,请检查网络连接和API配置')

        # Persist the conversation id so follow-up turns reuse it.
        query.session.using_conversation.uuid = chunk['conversation_id']
|
2024-12-14 17:51:11 +08:00
|
|
|
|
|
2025-05-10 18:04:58 +08:00
|
|
|
|
    async def _workflow_messages(self, query: core_entities.Query) -> typing.AsyncGenerator[llm_entities.Message, None]:
        """Call a Dify workflow app and yield node/tool-call and result messages.

        Each started (non start/end) node is surfaced as a tool-call message;
        the `workflow_finished` event yields the final summary as an
        assistant message.

        Args:
            query: The current pipeline query.

        Yields:
            llm_entities.Message: Node tool-call messages and the final
            summary message.

        Raises:
            errors.DifyAPIError: If the finished workflow reports an error.
        """
        # Workflows have no server-side conversation, so a local UUID is
        # generated to track the session.
        if not query.session.using_conversation.uuid:
            query.session.using_conversation.uuid = str(uuid.uuid4())

        query.variables['conversation_id'] = query.session.using_conversation.uuid

        plain_text, image_ids = await self._preprocess_user_message(query)

        # Reference previously-uploaded images by their Dify file IDs.
        files = [
            {
                'type': 'image',
                'transfer_method': 'local_file',
                'upload_file_id': image_id,
            }
            for image_id in image_ids
        ]

        # Streaming events with no user-visible effect.
        ignored_events = ['text_chunk', 'workflow_started']

        inputs = {  # these variables are legacy variables, we need to keep them for compatibility
            'langbot_user_message_text': plain_text,
            'langbot_session_id': query.variables['session_id'],
            'langbot_conversation_id': query.variables['conversation_id'],
            'langbot_msg_create_time': query.variables['msg_create_time'],
        }

        inputs.update(query.variables)

        async for chunk in self.dify_client.workflow_run(
            inputs=inputs,
            user=f'{query.session.launcher_type.value}_{query.session.launcher_id}',
            files=files,
            timeout=120,
        ):
            self.ap.logger.debug('dify-workflow-chunk: ' + str(chunk))

            if chunk['event'] in ignored_events:
                continue

            if chunk['event'] == 'node_started':
                # Start/end pseudo-nodes carry no interesting information.
                if chunk['data']['node_type'] == 'start' or chunk['data']['node_type'] == 'end':
                    continue

                # Surface the running node as a tool call so the user can
                # see workflow progress.
                msg = llm_entities.Message(
                    role='assistant',
                    content=None,
                    tool_calls=[
                        llm_entities.ToolCall(
                            id=chunk['data']['node_id'],
                            type='function',
                            function=llm_entities.FunctionCall(
                                name=chunk['data']['title'],
                                arguments=json.dumps({}),
                            ),
                        )
                    ],
                )

                yield msg

            elif chunk['event'] == 'workflow_finished':
                if chunk['data']['error']:
                    raise errors.DifyAPIError(chunk['data']['error'])

                # NOTE(review): assumes the workflow defines a 'summary'
                # output variable — confirm against the configured app.
                msg = llm_entities.Message(
                    role='assistant',
                    content=chunk['data']['outputs']['summary'],
                )

                yield msg
|
|
|
|
|
|
|
2025-05-10 18:04:58 +08:00
|
|
|
|
async def run(self, query: core_entities.Query) -> typing.AsyncGenerator[llm_entities.Message, None]:
|
2024-12-14 17:51:11 +08:00
|
|
|
|
"""运行请求"""
|
2025-04-29 17:24:07 +08:00
|
|
|
|
if self.pipeline_config['ai']['dify-service-api']['app-type'] == 'chat':
|
2024-12-14 17:51:11 +08:00
|
|
|
|
async for msg in self._chat_messages(query):
|
|
|
|
|
|
yield msg
|
2025-04-29 17:24:07 +08:00
|
|
|
|
elif self.pipeline_config['ai']['dify-service-api']['app-type'] == 'agent':
|
2024-12-17 00:41:28 +08:00
|
|
|
|
async for msg in self._agent_chat_messages(query):
|
|
|
|
|
|
yield msg
|
2025-04-29 17:24:07 +08:00
|
|
|
|
elif self.pipeline_config['ai']['dify-service-api']['app-type'] == 'workflow':
|
2024-12-14 17:51:11 +08:00
|
|
|
|
async for msg in self._workflow_messages(query):
|
|
|
|
|
|
yield msg
|
|
|
|
|
|
else:
|
2024-12-16 23:54:56 +08:00
|
|
|
|
raise errors.DifyAPIError(
|
2025-04-29 17:24:07 +08:00
|
|
|
|
f'不支持的 Dify 应用类型: {self.pipeline_config["ai"]["dify-service-api"]["app-type"]}'
|
2024-12-16 23:54:56 +08:00
|
|
|
|
)
|