mirror of https://github.com/langbot-app/LangBot.git

fix: bug after rebase
@@ -54,10 +54,9 @@ class PreProcessor(stage.PipelineStage):
         query.prompt = conversation.prompt.copy()
         query.messages = conversation.messages.copy()
 
-        query.use_llm_model_uuid = llm_model.model_entity.uuid
-
         if selected_runner == 'local-agent':
             query.use_funcs = []
+            query.use_llm_model_uuid = llm_model.model_entity.uuid
 
             if llm_model.model_entity.abilities.__contains__('func_call'):
                 query.use_funcs = await self.ap.tool_mgr.get_all_tools()
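
Side note, not part of the commit: the inner condition's abilities.__contains__('func_call') is simply the explicit spelling of Python's in operator, so the same check could be written as 'func_call' in llm_model.model_entity.abilities. A self-contained illustration:

    # __contains__ and the in operator perform the same membership test.
    abilities = ['chat', 'func_call']
    assert abilities.__contains__('func_call') == ('func_call' in abilities)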
@@ -7,8 +7,9 @@ import uuid
 import traceback
 
 from .. import runner
-from ...core import app, entities as core_entities
-from .. import entities as llm_entities
+from ...core import app
+import langbot_plugin.api.entities.builtin.pipeline.query as pipeline_query
+import langbot_plugin.api.entities.builtin.provider.message as provider_message
 
 
 @runner.runner_class('langflow-api')
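
For orientation, the removed core_entities and llm_entities imports are replaced by entity modules from the langbot_plugin SDK. The only constructors this commit relies on are Message and MessageChunk from the provider_message module, called with the keyword arguments visible in the hunks below. A minimal, hypothetical usage sketch limited to those calls:

    import langbot_plugin.api.entities.builtin.provider.message as provider_message

    # Both constructors appear later in this diff with exactly these keyword arguments.
    partial = provider_message.MessageChunk(role='assistant', content='partial text', is_final=False)
    final = provider_message.Message(role='assistant', content='full text')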
@@ -19,7 +20,7 @@ class LangflowAPIRunner(runner.RequestRunner):
         self.ap = ap
         self.pipeline_config = pipeline_config
 
-    async def _build_request_payload(self, query: core_entities.Query) -> dict:
+    async def _build_request_payload(self, query: pipeline_query.Query) -> dict:
         """Build the request payload
 
         Args:
@@ -57,8 +58,8 @@ class LangflowAPIRunner(runner.RequestRunner):
         return payload
 
     async def run(
-        self, query: core_entities.Query
-    ) -> typing.AsyncGenerator[llm_entities.Message | llm_entities.MessageChunk, None]:
+        self, query: pipeline_query.Query
+    ) -> typing.AsyncGenerator[provider_message.Message | provider_message.MessageChunk, None]:
         """Run the request
 
         Args:
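
Taken together, the two signature changes above mean a request runner now receives a pipeline_query.Query and asynchronously yields provider_message objects. Below is a hypothetical minimal runner following that shape, meant to sit alongside the existing runner modules so the relative imports resolve; the registration name, class name, and constructor parameters are illustrative guesses based on the context lines above, not part of this commit:

    import typing

    from .. import runner
    from ...core import app
    import langbot_plugin.api.entities.builtin.pipeline.query as pipeline_query
    import langbot_plugin.api.entities.builtin.provider.message as provider_message


    @runner.runner_class('example-echo')  # hypothetical registration name
    class ExampleEchoRunner(runner.RequestRunner):
        """Hypothetical sketch; only the run() signature is taken from this diff."""

        def __init__(self, ap: app.Application, pipeline_config: dict):  # assumed constructor shape
            self.ap = ap
            self.pipeline_config = pipeline_config

        async def run(
            self, query: pipeline_query.Query
        ) -> typing.AsyncGenerator[provider_message.Message | provider_message.MessageChunk, None]:
            # Yield a single final reply using the new provider_message entities.
            yield provider_message.Message(role='assistant', content='echo')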
@@ -132,7 +133,7 @@ class LangflowAPIRunner(runner.RequestRunner):
 
                             # Emit a chunk every 8 messages or whenever there is new content
                             if message_count % 8 == 0 or len(message_text) > 0:
-                                yield llm_entities.MessageChunk(
+                                yield provider_message.MessageChunk(
                                     role='assistant', content=accumulated_content, is_final=False
                                 )
                         except json.JSONDecodeError:
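
The throttling comment in the hunk above describes an accumulation pattern: text is appended to accumulated_content, a non-final MessageChunk is emitted every 8 messages or whenever the current message carried text, and a final chunk is sent once the stream ends (as the next hunk shows). A standalone, hypothetical sketch of that pattern using the same variable names, with the real runner's JSON parsing and HTTP streaming omitted:

    import langbot_plugin.api.entities.builtin.provider.message as provider_message


    async def emit_chunks(message_texts: list[str]):
        # Hypothetical helper illustrating the chunking rule; not part of the commit.
        accumulated_content = ''
        message_count = 0
        for message_text in message_texts:
            message_count += 1
            accumulated_content += message_text
            # Emit a chunk every 8 messages or whenever there is new content.
            if message_count % 8 == 0 or len(message_text) > 0:
                yield provider_message.MessageChunk(
                    role='assistant', content=accumulated_content, is_final=False
                )
        # Send the final message once the input is exhausted.
        yield provider_message.MessageChunk(role='assistant', content=accumulated_content, is_final=True)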
@@ -141,7 +142,7 @@ class LangflowAPIRunner(runner.RequestRunner):
                             continue
 
                     # Send the final message
-                    yield llm_entities.MessageChunk(role='assistant', content=accumulated_content, is_final=True)
+                    yield provider_message.MessageChunk(role='assistant', content=accumulated_content, is_final=True)
                 else:
                     # Non-streaming request
                     response = await client.post(url, json=payload, headers=headers, timeout=120.0)
@@ -174,7 +175,7 @@ class LangflowAPIRunner(runner.RequestRunner):
 
             # Build the reply message
             if is_stream:
-                yield llm_entities.MessageChunk(role='assistant', content=message_text, is_final=True)
+                yield provider_message.MessageChunk(role='assistant', content=message_text, is_final=True)
             else:
-                reply_message = llm_entities.Message(role='assistant', content=message_text)
+                reply_message = provider_message.Message(role='assistant', content=message_text)
                 yield reply_message