Mirror of https://github.com/langbot-app/LangBot.git (synced 2025-11-25 19:37:36 +08:00)
Compare commits
30 Commits
| SHA1 |
|---|
| 79804b6ecd |
| 76434b2f4e |
| ec8bd4922e |
| 4ffa773fac |
| ea8b7bc8aa |
| 39ce5646f6 |
| 5092a82739 |
| 3bba0b6d9a |
| 7a19dd503d |
| 9e6a01fefd |
| 933471b4d9 |
| f81808d239 |
| 96832b6f7d |
| e2eb0a84b0 |
| c8eb2e3376 |
| 21fe5822f9 |
| d49cc9a7a3 |
| 910d0bfae1 |
| d6761949ca |
| 6afac1f593 |
| 4d1a270d22 |
| a7888f5536 |
| b9049e91cf |
| 7db56c8e77 |
| 50563cb957 |
| 18ae2299a7 |
| 7463e0aab9 |
| c92d47bb95 |
| 0b1af7df91 |
| a9104eb2da |
README.md (10 lines changed)

@@ -39,7 +39,17 @@
 <a href="https://github.com/RockChinQ/qcg-installer">Installer source code</a> |
 <a href="https://github.com/RockChinQ/qcg-tester">Test project source code</a> |
 <a href="https://github.com/RockChinQ/qcg-center">Telemetry server source code</a> |
 <a href="https://github.com/the-lazy-me/QChatGPT-Wiki">Official documentation repository</a>

+
+<hr/>
+
+<div align="center">
+JD Cloud single-GPU 4090 instance (15 cores / 90 GB) <br/>
+Only 1.89 CNY per hour, monthly plans from 1,225 CNY <br/>
+Optional pre-installed applications such as Stable Diffusion; start and stop on demand with transparent billing <br/>
+https://3.cn/1ZOi6-Gj
+</div>
+
 <img alt="回复效果(带有联网插件)" src="https://qchatgpt.rockchin.top/assets/image/QChatGPT-0516.png" width="500px"/>
 </div>
@@ -9,8 +9,6 @@ from .groups import plugin
 from ...core import app

-BACKEND_URL = "https://api.qchatgpt.rockchin.top/api/v2"

 class V2CenterAPI:
     """中央服务器 v2 API 交互类"""

@@ -23,7 +21,7 @@ class V2CenterAPI:
     plugin: plugin.V2PluginDataAPI = None
     """插件 API 组"""

-    def __init__(self, ap: app.Application, basic_info: dict = None, runtime_info: dict = None):
+    def __init__(self, ap: app.Application, backend_url: str, basic_info: dict = None, runtime_info: dict = None):
         """初始化"""

         logging.debug("basic_info: %s, runtime_info: %s", basic_info, runtime_info)
@@ -31,7 +29,7 @@ class V2CenterAPI:
         apigroup.APIGroup._basic_info = basic_info
         apigroup.APIGroup._runtime_info = runtime_info

-        self.main = main.V2MainDataAPI(BACKEND_URL, ap)
-        self.usage = usage.V2UsageDataAPI(BACKEND_URL, ap)
-        self.plugin = plugin.V2PluginDataAPI(BACKEND_URL, ap)
+        self.main = main.V2MainDataAPI(backend_url, ap)
+        self.usage = usage.V2UsageDataAPI(backend_url, ap)
+        self.plugin = plugin.V2PluginDataAPI(backend_url, ap)
@@ -37,7 +37,10 @@ class JSONConfigFile(file_model.ConfigFile):
             self.template_data = json.load(f)

         with open(self.config_file_name, "r", encoding="utf-8") as f:
-            cfg = json.load(f)
+            try:
+                cfg = json.load(f)
+            except json.JSONDecodeError as e:
+                raise Exception(f"配置文件 {self.config_file_name} 语法错误: {e}")

         if completion:
pkg/config/migrations/m007_qcg_center_url.py (new file, 20 lines)

@@ -0,0 +1,20 @@
+from __future__ import annotations
+
+from .. import migration
+
+
+@migration.migration_class("qcg-center-url-config", 7)
+class QCGCenterURLConfigMigration(migration.Migration):
+    """迁移"""
+
+    async def need_migrate(self) -> bool:
+        """判断当前环境是否需要运行此迁移"""
+        return "qcg-center-url" not in self.ap.system_cfg.data
+
+    async def run(self):
+        """执行迁移"""
+
+        if "qcg-center-url" not in self.ap.system_cfg.data:
+            self.ap.system_cfg.data["qcg-center-url"] = "https://api.qchatgpt.rockchin.top/api/v2"
+
+        await self.ap.system_cfg.dump_config()
pkg/config/migrations/m008_ad_fixwin_config_migrate.py (new file, 29 lines)

@@ -0,0 +1,29 @@
+from __future__ import annotations
+
+from .. import migration
+
+
+@migration.migration_class("ad-fixwin-cfg-migration", 8)
+class AdFixwinConfigMigration(migration.Migration):
+    """迁移"""
+
+    async def need_migrate(self) -> bool:
+        """判断当前环境是否需要运行此迁移"""
+        return isinstance(
+            self.ap.pipeline_cfg.data["rate-limit"]["fixwin"]["default"],
+            int
+        )
+
+    async def run(self):
+        """执行迁移"""
+
+        for session_name in self.ap.pipeline_cfg.data["rate-limit"]["fixwin"]:
+
+            temp_dict = {
+                "window-size": 60,
+                "limit": self.ap.pipeline_cfg.data["rate-limit"]["fixwin"][session_name]
+            }
+
+            self.ap.pipeline_cfg.data["rate-limit"]["fixwin"][session_name] = temp_dict
+
+        await self.ap.pipeline_cfg.dump_config()
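To make the m008 migration above concrete, here is a minimal standalone sketch of its effect on the `rate-limit.fixwin` section, assuming the pre-migration values are bare per-session integer limits (which is what the `isinstance(..., int)` check detects). The `migrate_fixwin` helper is illustrative only and not part of this change:

```python
# Illustrative only: mirrors what m008_ad_fixwin_config_migrate.py does to the pipeline config data.
def migrate_fixwin(fixwin: dict) -> dict:
    """Turn each bare integer limit into a {"window-size", "limit"} dict with a 60s window."""
    return {
        session: {"window-size": 60, "limit": limit}
        for session, limit in fixwin.items()
    }

before = {"default": 60, "group_12345": 120}  # old shape: requests allowed per fixed 60s window
after = migrate_fixwin(before)
# after == {"default": {"window-size": 60, "limit": 60},
#           "group_12345": {"window-size": 60, "limit": 120}}
```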
pkg/config/migrations/m009_msg_truncator_cfg.py (new file, 24 lines)

@@ -0,0 +1,24 @@
+from __future__ import annotations
+
+from .. import migration
+
+
+@migration.migration_class("msg-truncator-cfg-migration", 9)
+class MsgTruncatorConfigMigration(migration.Migration):
+    """迁移"""
+
+    async def need_migrate(self) -> bool:
+        """判断当前环境是否需要运行此迁移"""
+        return 'msg-truncate' not in self.ap.pipeline_cfg.data
+
+    async def run(self):
+        """执行迁移"""
+
+        self.ap.pipeline_cfg.data['msg-truncate'] = {
+            'method': 'round',
+            'round': {
+                'max-round': 10
+            }
+        }
+
+        await self.ap.pipeline_cfg.dump_config()
@@ -1,5 +1,7 @@
 from __future__ import print_function

+import traceback
+
 from . import app
 from ..audit import identifier
 from . import stage
@@ -27,6 +29,7 @@ async def make_app() -> app.Application:
     for stage_name in stage_order:
         stage_cls = stage.preregistered_stages[stage_name]
         stage_inst = stage_cls()

         await stage_inst.run(ap)

     await ap.initialize()
@@ -35,5 +38,8 @@


 async def main():
-    app_inst = await make_app()
-    await app_inst.run()
+    try:
+        app_inst = await make_app()
+        await app_inst.run()
+    except Exception as e:
+        traceback.print_exc()
@@ -8,16 +8,3 @@ from ...config.impls import pymodule

 load_python_module_config = config_mgr.load_python_module_config
 load_json_config = config_mgr.load_json_config
-
-
-async def override_config_manager(cfg_mgr: config_mgr.ConfigManager) -> list[str]:
-    override_json = json.load(open("override.json", "r", encoding="utf-8"))
-    overrided = []
-
-    config = cfg_mgr.data
-    for key in override_json:
-        if key in config:
-            config[key] = override_json[key]
-            overrided.append(key)
-
-    return overrided
@@ -67,7 +67,7 @@ class Query(pydantic.BaseModel):
     use_funcs: typing.Optional[list[tools_entities.LLMFunction]] = None
     """使用的函数,由前置处理器阶段设置"""

-    resp_messages: typing.Optional[list[llm_entities.Message]] = []
+    resp_messages: typing.Optional[list[llm_entities.Message]] | typing.Optional[list[mirai.MessageChain]] = []
    """由Process阶段生成的回复消息对象列表"""

     resp_message_chain: typing.Optional[list[mirai.MessageChain]] = None
@@ -34,6 +34,7 @@ class BuildAppStage(stage.BootingStage):

         center_v2_api = center_v2.V2CenterAPI(
             ap,
+            backend_url=ap.system_cfg.data["qcg-center-url"],
             basic_info={
                 "host_id": identifier.identifier["host_id"],
                 "instance_id": identifier.identifier["instance_id"],
@@ -4,8 +4,8 @@ import importlib

 from .. import stage, app
 from ...config import migration
-from ...config.migrations import m001_sensitive_word_migration, m002_openai_config_migration, m003_anthropic_requester_cfg_completion, m004_moonshot_cfg_completion, m006_vision_config
-from ...config.migrations import m005_deepseek_cfg_completion
+from ...config.migrations import m001_sensitive_word_migration, m002_openai_config_migration, m003_anthropic_requester_cfg_completion, m004_moonshot_cfg_completion
+from ...config.migrations import m005_deepseek_cfg_completion, m006_vision_config, m007_qcg_center_url, m008_ad_fixwin_config_migrate, m009_msg_truncator_cfg


 @stage.stage_class("MigrationStage")
@@ -163,13 +163,13 @@ class ContentFilterStage(stage.PipelineStage):
                 )
             elif stage_inst_name == 'PostContentFilterStage':
                 # 仅处理 query.resp_messages[-1].content 是 str 的情况
-                if isinstance(query.resp_messages[-1].content, str):
+                if isinstance(query.resp_messages[-1], llm_entities.Message) and isinstance(query.resp_messages[-1].content, str):
                     return await self._post_process(
                         query.resp_messages[-1].content,
                         query
                     )
                 else:
-                    self.ap.logger.debug(f"resp_messages[-1] 不是 str 类型,跳过内容过滤器检查。")
+                    self.ap.logger.debug(f"resp_messages[-1] 不是 Message 类型或 query.resp_messages[-1].content 不是 str 类型,跳过内容过滤器检查。")
                     return entities.StageProcessResult(
                         result_type=entities.ResultType.CONTINUE,
                         new_query=query
@@ -34,18 +34,18 @@ class LongTextProcessStage(stage.PipelineStage):
             if os.name == "nt":
                 use_font = "C:/Windows/Fonts/msyh.ttc"
                 if not os.path.exists(use_font):
-                    self.ap.logger.warn("未找到字体文件,且无法使用Windows自带字体,更换为转发消息组件以发送长消息,您可以在config.py中调整相关设置。")
+                    self.ap.logger.warn("未找到字体文件,且无法使用Windows自带字体,更换为转发消息组件以发送长消息,您可以在配置文件中调整相关设置。")
                     config['blob_message_strategy'] = "forward"
                 else:
                     self.ap.logger.info("使用Windows自带字体:" + use_font)
                     config['font-path'] = use_font
             else:
-                self.ap.logger.warn("未找到字体文件,且无法使用系统自带字体,更换为转发消息组件以发送长消息,您可以在config.py中调整相关设置。")
+                self.ap.logger.warn("未找到字体文件,且无法使用系统自带字体,更换为转发消息组件以发送长消息,您可以在配置文件中调整相关设置。")

                 self.ap.platform_cfg.data['long-text-process']['strategy'] = "forward"
         except:
             traceback.print_exc()
-            self.ap.logger.error("加载字体文件失败({}),更换为转发消息组件以发送长消息,您可以在config.py中调整相关设置。".format(use_font))
+            self.ap.logger.error("加载字体文件失败({}),更换为转发消息组件以发送长消息,您可以在配置文件中调整相关设置。".format(use_font))

             self.ap.platform_cfg.data['long-text-process']['strategy'] = "forward"
pkg/pipeline/msgtrun/__init__.py (new file, 0 lines)

pkg/pipeline/msgtrun/msgtrun.py (new file, 35 lines)

@@ -0,0 +1,35 @@
+from __future__ import annotations
+
+from .. import stage, entities, stagemgr
+from ...core import entities as core_entities
+from . import truncator
+from .truncators import round
+
+
+@stage.stage_class("ConversationMessageTruncator")
+class ConversationMessageTruncator(stage.PipelineStage):
+    """会话消息截断器
+
+    用于截断会话消息链,以适应平台消息长度限制。
+    """
+    trun: truncator.Truncator
+
+    async def initialize(self):
+        use_method = self.ap.pipeline_cfg.data['msg-truncate']['method']
+
+        for trun in truncator.preregistered_truncators:
+            if trun.name == use_method:
+                self.trun = trun(self.ap)
+                break
+        else:
+            raise ValueError(f"未知的截断器: {use_method}")
+
+    async def process(self, query: core_entities.Query, stage_inst_name: str) -> entities.StageProcessResult:
+        """处理
+        """
+        query = await self.trun.truncate(query)
+
+        return entities.StageProcessResult(
+            result_type=entities.ResultType.CONTINUE,
+            new_query=query
+        )
pkg/pipeline/msgtrun/truncator.py (new file, 56 lines)

@@ -0,0 +1,56 @@
+from __future__ import annotations
+
+import typing
+import abc
+
+from ...core import entities as core_entities, app
+
+
+preregistered_truncators: list[typing.Type[Truncator]] = []
+
+
+def truncator_class(
+    name: str
+) -> typing.Callable[[typing.Type[Truncator]], typing.Type[Truncator]]:
+    """截断器类装饰器
+
+    Args:
+        name (str): 截断器名称
+
+    Returns:
+        typing.Callable[[typing.Type[Truncator]], typing.Type[Truncator]]: 装饰器
+    """
+    def decorator(cls: typing.Type[Truncator]) -> typing.Type[Truncator]:
+        assert issubclass(cls, Truncator)
+
+        cls.name = name
+
+        preregistered_truncators.append(cls)
+
+        return cls
+
+    return decorator
+
+
+class Truncator(abc.ABC):
+    """消息截断器基类
+    """
+
+    name: str
+
+    ap: app.Application
+
+    def __init__(self, ap: app.Application):
+        self.ap = ap
+
+    async def initialize(self):
+        pass
+
+    @abc.abstractmethod
+    async def truncate(self, query: core_entities.Query) -> core_entities.Query:
+        """截断
+
+        一般只需要操作query.messages,也可以扩展操作query.prompt, query.user_message。
+        请勿操作其他字段。
+        """
+        pass
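As a usage sketch of the registry above (not part of this compare): a further truncator could plausibly be added by subclassing `Truncator` and registering it with `truncator_class`, if it were placed in a new module under `pkg/pipeline/msgtrun/truncators/`. The `"last-only"` name and the behaviour below are hypothetical:

```python
# Hypothetical module under pkg/pipeline/msgtrun/truncators/; names are illustrative only.
from .. import truncator
from ....core import entities as core_entities


@truncator.truncator_class("last-only")
class LastMessageOnlyTruncator(truncator.Truncator):
    """Keep only the most recent message of the conversation."""

    async def truncate(self, query: core_entities.Query) -> core_entities.Query:
        # Per the base-class docstring, only query.messages should normally be modified.
        query.messages = query.messages[-1:]
        return query
```

Selecting it would then presumably be a matter of setting `msg-truncate.method` in pipeline.json to the registered name, which is how `ConversationMessageTruncator.initialize()` picks the built-in `round` truncator.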
pkg/pipeline/msgtrun/truncators/__init__.py (new file, 0 lines)

pkg/pipeline/msgtrun/truncators/round.py (new file, 32 lines)

@@ -0,0 +1,32 @@
+from __future__ import annotations
+
+from .. import truncator
+from ....core import entities as core_entities
+
+
+@truncator.truncator_class("round")
+class RoundTruncator(truncator.Truncator):
+    """前文回合数阶段器
+    """
+
+    async def truncate(self, query: core_entities.Query) -> core_entities.Query:
+        """截断
+        """
+        max_round = self.ap.pipeline_cfg.data['msg-truncate']['round']['max-round']
+
+        temp_messages = []
+
+        current_round = 0
+
+        # 从后往前遍历
+        for msg in query.messages[::-1]:
+            if current_round < max_round:
+                temp_messages.append(msg)
+                if msg.role == 'user':
+                    current_round += 1
+            else:
+                break
+
+        query.messages = temp_messages[::-1]
+
+        return query
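The `round` truncator above walks the history from newest to oldest and counts one round every time it passes a `user` message. A standalone sketch of that loop with plain dicts instead of the project's `Message` objects, purely for illustration:

```python
# Standalone illustration of the "round" truncation loop above, using plain dicts.
def keep_last_rounds(messages: list[dict], max_round: int) -> list[dict]:
    kept: list[dict] = []
    current_round = 0
    for msg in reversed(messages):        # walk from newest to oldest
        if current_round < max_round:
            kept.append(msg)
            if msg["role"] == "user":     # a user message closes one round
                current_round += 1
        else:
            break
    return kept[::-1]                     # restore chronological order

history = [
    {"role": "user", "content": "hi"},
    {"role": "assistant", "content": "hello"},
    {"role": "user", "content": "how are you?"},
    {"role": "assistant", "content": "fine"},
]
assert keep_last_rounds(history, max_round=1) == history[2:]
```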
@@ -42,12 +42,7 @@ class ChatMessageHandler(handler.MessageHandler):
                 if event_ctx.event.reply is not None:
                     mc = mirai.MessageChain(event_ctx.event.reply)

-                    query.resp_messages.append(
-                        llm_entities.Message(
-                            role='plugin',
-                            content=mc,
-                        )
-                    )
+                    query.resp_messages.append(mc)

                     yield entities.StageProcessResult(
                         result_type=entities.ResultType.CONTINUE,
@@ -48,12 +48,7 @@ class CommandHandler(handler.MessageHandler):
                 if event_ctx.event.reply is not None:
                     mc = mirai.MessageChain(event_ctx.event.reply)

-                    query.resp_messages.append(
-                        llm_entities.Message(
-                            role='command',
-                            content=str(mc),
-                        )
-                    )
+                    query.resp_messages.append(mc)

                     yield entities.StageProcessResult(
                         result_type=entities.ResultType.CONTINUE,
@@ -1,18 +1,15 @@
 # 固定窗口算法
 from __future__ import annotations

 import asyncio
 import time

 from .. import algo


 # 固定窗口算法
 class SessionContainer:

     wait_lock: asyncio.Lock

     records: dict[int, int]
-    """访问记录,key为每分钟的起始时间戳,value为访问次数"""
+    """访问记录,key为每窗口长度的起始时间戳,value为访问次数"""

     def __init__(self):
         self.wait_lock = asyncio.Lock()
@@ -47,30 +44,34 @@ class FixedWindowAlgo(algo.ReteLimitAlgo):

         # 等待锁
         async with container.wait_lock:

+            # 获取窗口大小和限制
+            window_size = self.ap.pipeline_cfg.data['rate-limit']['fixwin']['default']['window-size']
+            limitation = self.ap.pipeline_cfg.data['rate-limit']['fixwin']['default']['limit']
+
+            if session_name in self.ap.pipeline_cfg.data['rate-limit']['fixwin']:
+                window_size = self.ap.pipeline_cfg.data['rate-limit']['fixwin'][session_name]['window-size']
+                limitation = self.ap.pipeline_cfg.data['rate-limit']['fixwin'][session_name]['limit']
+
             # 获取当前时间戳
             now = int(time.time())

-            # 获取当前分钟的起始时间戳
-            now = now - now % 60
+            # 获取当前窗口的起始时间戳
+            now = now - now % window_size

-            # 获取当前分钟的访问次数
+            # 获取当前窗口的访问次数
             count = container.records.get(now, 0)

-            limitation = self.ap.pipeline_cfg.data['rate-limit']['fixwin']['default']
-
-            if session_name in self.ap.pipeline_cfg.data['rate-limit']['fixwin']:
-                limitation = self.ap.pipeline_cfg.data['rate-limit']['fixwin'][session_name]
-
             # 如果访问次数超过了限制
             if count >= limitation:
                 if self.ap.pipeline_cfg.data['rate-limit']['strategy'] == 'drop':
                     return False
                 elif self.ap.pipeline_cfg.data['rate-limit']['strategy'] == 'wait':
-                    # 等待下一分钟
-                    await asyncio.sleep(60 - time.time() % 60)
+                    # 等待下一窗口
+                    await asyncio.sleep(window_size - time.time() % window_size)

                     now = int(time.time())
-                    now = now - now % 60
+                    now = now - now % window_size

             if now not in container.records:
                 container.records = {}
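For clarity, the fixed-window bookkeeping above reduces to two pieces of arithmetic: bucketing timestamps by the start of their window, and (for the `wait` strategy) sleeping until the next window boundary. A minimal sketch, independent of the project's classes:

```python
import time

def window_start(now: int, window_size: int) -> int:
    """All requests whose timestamps share this value are counted in the same window."""
    return now - now % window_size

def seconds_until_next_window(window_size: int) -> float:
    """How long the 'wait' strategy sleeps before the counter effectively resets."""
    return window_size - time.time() % window_size

# With a 60-second window, timestamps 120..179 all map to the bucket key 120.
assert window_start(125, 60) == 120
```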
@@ -44,8 +44,8 @@ class GroupRespondRuleCheckStage(stage.PipelineStage):

         use_rule = rules['default']

-        if str(query.launcher_id) in use_rule:
-            use_rule = use_rule[str(query.launcher_id)]
+        if str(query.launcher_id) in rules:
+            use_rule = rules[str(query.launcher_id)]

         for rule_matcher in self.rule_matchers:  # 任意一个匹配就放行
             res = await rule_matcher.match(str(query.message_chain), query.message_chain, use_rule, query)
@@ -20,11 +20,14 @@ class PrefixRule(rule_model.GroupRespondRule):
         for prefix in prefixes:
             if message_text.startswith(prefix):

+                # 查找第一个plain元素
+                for me in message_chain:
+                    if isinstance(me, mirai.Plain):
+                        me.text = me.text[len(prefix):]
+
                 return entities.RuleJudgeResult(
                     matching=True,
-                    replacement=mirai.MessageChain([
-                        mirai.Plain(message_text[len(prefix):])
-                    ]),
+                    replacement=message_chain,
                 )

         return entities.RuleJudgeResult(
@@ -13,6 +13,7 @@ from .respback import respback
 from .wrapper import wrapper
 from .preproc import preproc
 from .ratelimit import ratelimit
+from .msgtrun import msgtrun


 # 请求处理阶段顺序
@@ -21,6 +22,7 @@ stage_order = [
     "BanSessionCheckStage",  # 封禁会话检查
     "PreContentFilterStage",  # 内容过滤前置阶段
     "PreProcessor",  # 预处理器
+    "ConversationMessageTruncator",  # 会话消息截断器
     "RequireRateLimitOccupancy",  # 请求速率限制占用
     "MessageProcessor",  # 处理器
     "ReleaseRateLimitOccupancy",  # 释放速率限制占用
@@ -32,80 +32,49 @@ class ResponseWrapper(stage.PipelineStage):
) -> typing.AsyncGenerator[entities.StageProcessResult, None]:
"""处理
"""
if query.resp_messages[-1].role == 'command':
# query.resp_message_chain.append(mirai.MessageChain("[bot] "+query.resp_messages[-1].content))
query.resp_message_chain.append(query.resp_messages[-1].get_content_mirai_message_chain(prefix_text='[bot] '))
# 如果 resp_messages[-1] 已经是 MessageChain 了
if isinstance(query.resp_messages[-1], mirai.MessageChain):
query.resp_message_chain.append(query.resp_messages[-1])
yield entities.StageProcessResult(
result_type=entities.ResultType.CONTINUE,
new_query=query
)
elif query.resp_messages[-1].role == 'plugin':
# if not isinstance(query.resp_messages[-1].content, mirai.MessageChain):
# query.resp_message_chain.append(mirai.MessageChain(query.resp_messages[-1].content))
# else:
# query.resp_message_chain.append(query.resp_messages[-1].content)
query.resp_message_chain.append(query.resp_messages[-1].get_content_mirai_message_chain())
yield entities.StageProcessResult(
result_type=entities.ResultType.CONTINUE,
new_query=query
)
else:
if query.resp_messages[-1].role == 'command':
# query.resp_message_chain.append(mirai.MessageChain("[bot] "+query.resp_messages[-1].content))
query.resp_message_chain.append(query.resp_messages[-1].get_content_mirai_message_chain(prefix_text='[bot] '))
if query.resp_messages[-1].role == 'assistant':
result = query.resp_messages[-1]
session = await self.ap.sess_mgr.get_session(query)
yield entities.StageProcessResult(
result_type=entities.ResultType.CONTINUE,
new_query=query
)
elif query.resp_messages[-1].role == 'plugin':
# if not isinstance(query.resp_messages[-1].content, mirai.MessageChain):
# query.resp_message_chain.append(mirai.MessageChain(query.resp_messages[-1].content))
# else:
# query.resp_message_chain.append(query.resp_messages[-1].content)
query.resp_message_chain.append(query.resp_messages[-1].get_content_mirai_message_chain())
reply_text = ''
yield entities.StageProcessResult(
result_type=entities.ResultType.CONTINUE,
new_query=query
)
else:
if result.content is not None: # 有内容
reply_text = str(result.get_content_mirai_message_chain())
if query.resp_messages[-1].role == 'assistant':
result = query.resp_messages[-1]
session = await self.ap.sess_mgr.get_session(query)
# ============= 触发插件事件 ===============
event_ctx = await self.ap.plugin_mgr.emit_event(
event=events.NormalMessageResponded(
launcher_type=query.launcher_type.value,
launcher_id=query.launcher_id,
sender_id=query.sender_id,
session=session,
prefix='',
response_text=reply_text,
finish_reason='stop',
funcs_called=[fc.function.name for fc in result.tool_calls] if result.tool_calls is not None else [],
query=query
)
)
if event_ctx.is_prevented_default():
yield entities.StageProcessResult(
result_type=entities.ResultType.INTERRUPT,
new_query=query
)
else:
if event_ctx.event.reply is not None:
query.resp_message_chain.append(mirai.MessageChain(event_ctx.event.reply))
reply_text = ''
else:
if result.content: # 有内容
reply_text = str(result.get_content_mirai_message_chain())
query.resp_message_chain.append(result.get_content_mirai_message_chain())
yield entities.StageProcessResult(
result_type=entities.ResultType.CONTINUE,
new_query=query
)
if result.tool_calls is not None: # 有函数调用
function_names = [tc.function.name for tc in result.tool_calls]
reply_text = f'调用函数 {".".join(function_names)}...'
query.resp_message_chain.append(mirai.MessageChain([mirai.Plain(reply_text)]))
if self.ap.platform_cfg.data['track-function-calls']:
# ============= 触发插件事件 ===============
event_ctx = await self.ap.plugin_mgr.emit_event(
event=events.NormalMessageResponded(
launcher_type=query.launcher_type.value,
@@ -119,7 +88,6 @@ class ResponseWrapper(stage.PipelineStage):
query=query
)
)
if event_ctx.is_prevented_default():
yield entities.StageProcessResult(
result_type=entities.ResultType.INTERRUPT,
@@ -132,9 +100,52 @@ class ResponseWrapper(stage.PipelineStage):
else:
query.resp_message_chain.append(mirai.MessageChain([mirai.Plain(reply_text)]))
query.resp_message_chain.append(result.get_content_mirai_message_chain())
yield entities.StageProcessResult(
result_type=entities.ResultType.CONTINUE,
new_query=query
)
if result.tool_calls is not None: # 有函数调用
function_names = [tc.function.name for tc in result.tool_calls]
reply_text = f'调用函数 {".".join(function_names)}...'
query.resp_message_chain.append(mirai.MessageChain([mirai.Plain(reply_text)]))
if self.ap.platform_cfg.data['track-function-calls']:
event_ctx = await self.ap.plugin_mgr.emit_event(
event=events.NormalMessageResponded(
launcher_type=query.launcher_type.value,
launcher_id=query.launcher_id,
sender_id=query.sender_id,
session=session,
prefix='',
response_text=reply_text,
finish_reason='stop',
funcs_called=[fc.function.name for fc in result.tool_calls] if result.tool_calls is not None else [],
query=query
)
)
if event_ctx.is_prevented_default():
yield entities.StageProcessResult(
result_type=entities.ResultType.INTERRUPT,
new_query=query
)
else:
if event_ctx.event.reply is not None:
query.resp_message_chain.append(mirai.MessageChain(event_ctx.event.reply))
else:
query.resp_message_chain.append(mirai.MessageChain([mirai.Plain(reply_text)]))
yield entities.StageProcessResult(
result_type=entities.ResultType.CONTINUE,
new_query=query
)
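To summarise the restructuring above: after this change, `query.resp_messages[-1]` may be either a plain `mirai.MessageChain` (now appended directly by the chat and command handlers, as shown earlier in this compare) or an `llm_entities.Message`, and the wrapper branches on that first. A condensed, standalone sketch of the dispatch, with stand-in classes and without the plugin events and tool-call handling:

```python
# Condensed sketch of the new dispatch; MessageChain / Message are stand-ins, not the real classes.
from dataclasses import dataclass

class MessageChain(list):
    """Stand-in for mirai.MessageChain."""

@dataclass
class Message:
    role: str
    content: str

    def get_content_mirai_message_chain(self, prefix_text: str = '') -> MessageChain:
        return MessageChain([prefix_text + self.content])

def wrap_last_response(resp_messages: list) -> MessageChain:
    last = resp_messages[-1]
    if isinstance(last, MessageChain):
        return last                                    # handlers appended a chain directly
    if last.role == 'command':
        return last.get_content_mirai_message_chain(prefix_text='[bot] ')
    return last.get_content_mirai_message_chain()      # 'plugin' / 'assistant'

assert wrap_last_response([MessageChain(["hi"])]) == ["hi"]
assert wrap_last_response([Message(role='command', content='help')]) == ["[bot] help"]
```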
@@ -31,13 +31,15 @@ class AiocqhttpMessageConverter(adapter.MessageConverter):
                 msg_time = msg.time
             elif type(msg) is mirai.Image:
                 arg = ''
-                if msg.url:
+                if msg.base64:
+                    arg = msg.base64
+                    msg_list.append(aiocqhttp.MessageSegment.image(f"base64://{arg}"))
+                elif msg.url:
                     arg = msg.url
+                    msg_list.append(aiocqhttp.MessageSegment.image(arg))
                 elif msg.path:
                     arg = msg.path
-
-                msg_list.append(aiocqhttp.MessageSegment.image(arg))
+                    msg_list.append(aiocqhttp.MessageSegment.image(arg))
             elif type(msg) is mirai.At:
                 msg_list.append(aiocqhttp.MessageSegment.at(msg.target))
             elif type(msg) is mirai.AtAll:
@@ -322,7 +322,7 @@ class NakuruProjectAdapter(adapter_model.MessageSourceAdapter):
                 proxies=None
             )
             if resp.status_code == 403:
-                raise Exception("go-cqhttp拒绝访问,请检查config.py中nakuru_config的token是否与go-cqhttp设置的access-token匹配")
+                raise Exception("go-cqhttp拒绝访问,请检查配置文件中nakuru适配器的配置")
             self.bot_account_id = int(resp.json()['data']['user_id'])
         except Exception as e:
             raise Exception("获取go-cqhttp账号信息失败, 请检查是否已启动go-cqhttp并配置正确")
@@ -96,7 +96,16 @@ class Message(pydantic.BaseModel):
                 if ce.type == 'text':
                     mc.append(mirai.Plain(ce.text))
                 elif ce.type == 'image':
-                    mc.append(mirai.Image(url=ce.image_url))
+                    if ce.image_url.url.startswith("http"):
+                        mc.append(mirai.Image(url=ce.image_url.url))
+                    else:  # base64
+
+                        b64_str = ce.image_url.url
+
+                        if b64_str.startswith("data:"):
+                            b64_str = b64_str.split(",")[1]
+
+                        mc.append(mirai.Image(base64=b64_str))

         # 找第一个文字组件
         if prefix_text:
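The branch above accepts either an http(s) URL or a base64 data URI in `image_url.url`. A small standalone sketch of the data-URI case, using only string handling:

```python
def extract_base64(image_url: str) -> str:
    """Return the raw base64 payload of a data URI, or the input unchanged otherwise."""
    if image_url.startswith("data:"):
        # "data:image/jpeg;base64,/9j/4AAQ..." -> keep only the part after the first comma
        return image_url.split(",", 1)[1]
    return image_url

assert extract_base64("data:image/png;base64,iVBORw0KGgo=") == "iVBORw0KGgo="
```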
@@ -11,6 +11,7 @@ from .. import api, entities, errors
 from ....core import entities as core_entities
 from ... import entities as llm_entities
 from ...tools import entities as tools_entities
+from ....utils import image


 @api.requester_class("anthropic-messages")
@@ -54,29 +55,45 @@ class AnthropicMessages(api.LLMAPIRequester):
             and isinstance(system_role_message.content, str):
             args['system'] = system_role_message.content

         # 其他消息
         # req_messages = [
         #     m.dict(exclude_none=True) for m in messages \
         #     if (isinstance(m.content, str) and m.content.strip() != "") \
         #     or (isinstance(m.content, list) and )
         # ]
-        # 暂时不支持vision,仅保留纯文字的content
         req_messages = []

         for m in messages:
             if isinstance(m.content, str) and m.content.strip() != "":
                 req_messages.append(m.dict(exclude_none=True))
             elif isinstance(m.content, list):
-                # 删除m.content中的type!=text的元素
-                m.content = [
-                    c for c in m.content if c.type == "text"
-                ]
+                # m.content = [
+                #     c for c in m.content if c.type == "text"
+                # ]

-                if len(m.content) > 0:
-                    req_messages.append(m.dict(exclude_none=True))
+                # if len(m.content) > 0:
+                #     req_messages.append(m.dict(exclude_none=True))
+
+                msg_dict = m.dict(exclude_none=True)
+
+                for i, ce in enumerate(m.content):
+                    if ce.type == "image_url":
+                        alter_image_ele = {
+                            "type": "image",
+                            "source": {
+                                "type": "base64",
+                                "media_type": "image/jpeg",
+                                "data": await image.qq_image_url_to_base64(ce.image_url.url)
+                            }
+                        }
+                        msg_dict["content"][i] = alter_image_ele
+
+                req_messages.append(msg_dict)

         args["messages"] = req_messages

         # anthropic的tools处在beta阶段,sdk不稳定,故暂时不支持
         #
         # if funcs:
         #     tools = await self.ap.tool_mgr.generate_tools_for_openai(funcs)

         #     if tools:
         #         args["tools"] = tools

         try:
             resp = await self.client.messages.create(**args)
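For context, the vision change above rewrites an OpenAI-style `image_url` content part into the base64 `image` block that the Anthropic Messages API expects. A standalone sketch of that target shape, with the image download step left out (the project fetches and encodes via its own `image.qq_image_url_to_base64` helper):

```python
import base64

def to_anthropic_image_block(image_bytes: bytes, media_type: str = "image/jpeg") -> dict:
    """Build an Anthropic Messages API image content block from raw image bytes."""
    return {
        "type": "image",
        "source": {
            "type": "base64",
            "media_type": media_type,
            "data": base64.b64encode(image_bytes).decode("utf-8"),
        },
    }

# A content element {"type": "image_url", "image_url": {"url": ...}} would be replaced
# in place by the block returned here, as the loop above does.
```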
@@ -102,9 +102,16 @@ class OpenAIChatCompletions(api.LLMAPIRequester):
         messages: typing.List[llm_entities.Message],
         funcs: typing.List[tools_entities.LLMFunction] = None,
     ) -> llm_entities.Message:
-        req_messages = [  # req_messages 仅用于类内,外部同步由 query.messages 进行
-            m.dict(exclude_none=True) for m in messages
-        ]
+        req_messages = []  # req_messages 仅用于类内,外部同步由 query.messages 进行
+        for m in messages:
+            msg_dict = m.dict(exclude_none=True)
+            content = msg_dict.get("content")
+            if isinstance(content, list):
+                # 检查 content 列表中是否每个部分都是文本
+                if all(isinstance(part, dict) and part.get("type") == "text" for part in content):
+                    # 将所有文本部分合并为一个字符串
+                    msg_dict["content"] = "\n".join(part["text"] for part in content)
+            req_messages.append(msg_dict)

         try:
             return await self._closure(req_messages, model, funcs)
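A minimal standalone sketch of the merge rule introduced above: multipart content made up solely of text parts is flattened into one newline-joined string, while any other shape is passed through untouched.

```python
def flatten_text_parts(content):
    """Collapse [{"type": "text", "text": ...}, ...] into one string; leave other shapes as-is."""
    if isinstance(content, list) and all(
        isinstance(part, dict) and part.get("type") == "text" for part in content
    ):
        return "\n".join(part["text"] for part in content)
    return content

assert flatten_text_parts([{"type": "text", "text": "a"}, {"type": "text", "text": "b"}]) == "a\nb"
assert flatten_text_parts("already a string") == "already a string"
```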
@@ -9,11 +9,10 @@ from ...plugin import context as plugin_context


 class ToolManager:
-    """LLM工具管理器
-    """
+    """LLM工具管理器"""

     ap: app.Application

     def __init__(self, ap: app.Application):
         self.ap = ap
         self.all_functions = []
@@ -22,35 +21,33 @@ class ToolManager:
         pass

     async def get_function(self, name: str) -> entities.LLMFunction:
-        """获取函数
-        """
+        """获取函数"""
         for function in await self.get_all_functions():
             if function.name == name:
                 return function
         return None

-    async def get_function_and_plugin(self, name: str) -> typing.Tuple[entities.LLMFunction, plugin_context.BasePlugin]:
-        """获取函数和插件
-        """
+    async def get_function_and_plugin(
+        self, name: str
+    ) -> typing.Tuple[entities.LLMFunction, plugin_context.BasePlugin]:
+        """获取函数和插件"""
         for plugin in self.ap.plugin_mgr.plugins:
             for function in plugin.content_functions:
                 if function.name == name:
                     return function, plugin.plugin_inst
         return None, None

     async def get_all_functions(self) -> list[entities.LLMFunction]:
-        """获取所有函数
-        """
+        """获取所有函数"""
         all_functions: list[entities.LLMFunction] = []

         for plugin in self.ap.plugin_mgr.plugins:
             all_functions.extend(plugin.content_functions)

         return all_functions

-    async def generate_tools_for_openai(self, use_funcs: entities.LLMFunction) -> str:
-        """生成函数列表
-        """
+    async def generate_tools_for_openai(self, use_funcs: list[entities.LLMFunction]) -> list:
+        """生成函数列表"""
         tools = []

         for function in use_funcs:
@@ -60,40 +57,71 @@ class ToolManager:
                     "function": {
                         "name": function.name,
                         "description": function.description,
-                        "parameters": function.parameters
-                    }
+                        "parameters": function.parameters,
+                    },
                 }
                 tools.append(function_schema)

         return tools

+    async def generate_tools_for_anthropic(
+        self, use_funcs: list[entities.LLMFunction]
+    ) -> list:
+        """为anthropic生成函数列表
+
+        e.g.
+
+        [
+            {
+                "name": "get_stock_price",
+                "description": "Get the current stock price for a given ticker symbol.",
+                "input_schema": {
+                    "type": "object",
+                    "properties": {
+                        "ticker": {
+                            "type": "string",
+                            "description": "The stock ticker symbol, e.g. AAPL for Apple Inc."
+                        }
+                    },
+                    "required": ["ticker"]
+                }
+            }
+        ]
+        """
+
+        tools = []
+
+        for function in use_funcs:
+            if function.enable:
+                function_schema = {
+                    "name": function.name,
+                    "description": function.description,
+                    "input_schema": function.parameters,
+                }
+                tools.append(function_schema)
+
+        return tools
+
     async def execute_func_call(
-        self,
-        query: core_entities.Query,
-        name: str,
-        parameters: dict
+        self, query: core_entities.Query, name: str, parameters: dict
     ) -> typing.Any:
-        """执行函数调用
-        """
+        """执行函数调用"""

         try:

             function, plugin = await self.get_function_and_plugin(name)
             if function is None:
                 return None

-            parameters = parameters.copy()
-
-            parameters = {
-                "query": query,
-                **parameters
-            }
-
+            parameters = {"query": query, **parameters}

             return await function.func(plugin, **parameters)
         except Exception as e:
-            self.ap.logger.error(f'执行函数 {name} 时发生错误: {e}')
+            self.ap.logger.error(f"执行函数 {name} 时发生错误: {e}")
             traceback.print_exc()
-            return f'error occurred when executing function {name}: {e}'
+            return f"error occurred when executing function {name}: {e}"
         finally:

             plugin = None
@@ -107,11 +135,11 @@ class ToolManager:

             await self.ap.ctr_mgr.usage.post_function_record(
                 plugin={
-                    'name': plugin.plugin_name,
-                    'remote': plugin.plugin_source,
-                    'version': plugin.plugin_version,
-                    'author': plugin.plugin_author
+                    "name": plugin.plugin_name,
+                    "remote": plugin.plugin_source,
+                    "version": plugin.plugin_version,
+                    "author": plugin.plugin_author,
                 },
                 function_name=function.name,
                 function_description=function.description,
             )
         )
@@ -1 +1 @@
-semantic_version = "v3.2.0.1"
+semantic_version = "v3.2.3"
@@ -83,32 +83,38 @@
     {
         "name": "claude-3-opus-20240229",
         "requester": "anthropic-messages",
-        "token_mgr": "anthropic"
+        "token_mgr": "anthropic",
+        "vision_supported": true
     },
     {
         "name": "claude-3-sonnet-20240229",
         "requester": "anthropic-messages",
-        "token_mgr": "anthropic"
+        "token_mgr": "anthropic",
+        "vision_supported": true
     },
     {
         "name": "claude-3-haiku-20240307",
         "requester": "anthropic-messages",
-        "token_mgr": "anthropic"
+        "token_mgr": "anthropic",
+        "vision_supported": true
     },
     {
         "name": "moonshot-v1-8k",
         "requester": "moonshot-chat-completions",
-        "token_mgr": "moonshot"
+        "token_mgr": "moonshot",
+        "tool_call_supported": true
     },
     {
         "name": "moonshot-v1-32k",
         "requester": "moonshot-chat-completions",
-        "token_mgr": "moonshot"
+        "token_mgr": "moonshot",
+        "tool_call_supported": true
     },
     {
         "name": "moonshot-v1-128k",
         "requester": "moonshot-chat-completions",
-        "token_mgr": "moonshot"
+        "token_mgr": "moonshot",
+        "tool_call_supported": true
     },
     {
         "name": "deepseek-chat",
||||
@@ -29,7 +29,16 @@
|
||||
"strategy": "drop",
|
||||
"algo": "fixwin",
|
||||
"fixwin": {
|
||||
"default": 60
|
||||
"default": {
|
||||
"window-size": 60,
|
||||
"limit": 60
|
||||
}
|
||||
}
|
||||
},
|
||||
"msg-truncate": {
|
||||
"method": "round",
|
||||
"round": {
|
||||
"max-round": 10
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -10,5 +10,6 @@
         "default": 1
     },
     "pipeline-concurrency": 20,
+    "qcg-center-url": "https://api.qchatgpt.rockchin.top/api/v2",
     "help-message": "QChatGPT - 😎高稳定性、🧩支持插件、🌏实时联网的 ChatGPT QQ 机器人🤖\n链接:https://q.rkcn.top"
 }