Mirror of https://github.com/langbot-app/LangBot.git (synced 2025-11-26 03:44:58 +08:00)

Compare commits (31 commits):
- 9b8c5a3499
- 53dde0607d
- 7f034b4ffa
- 599ab83100
- f4a3508ec2
- 44b92909eb
- 8ed07b8d1a
- 2ff9ced15e
- 641b8d71ed
- a31b450f54
- 97bb24c5b9
- 5e5a3639d1
- 0a68a77e28
- 11a0c4142e
- d214d80579
- ed719fd44e
- 5dc6bed0d1
- b1244a4d4e
- 6aa325a4b1
- 88a11561f9
- fd30022065
- 9486312737
- e37070a985
- ffb98ecca2
- 29bd69ef97
- e46c9530cc
- 7ddd303e2d
- 66798a1d0f
- bd05afdf14
- 136e48f7ee
- facb5f177a
.github/workflows/build-dev-image.yaml (5 changes)

@@ -7,9 +7,14 @@ on:
jobs:
  build-dev-image:
    runs-on: ubuntu-latest
    # Skip if the ref is a tag
    if: ${{ !startsWith(github.ref, 'refs/tags/') }}
    steps:
      - name: Checkout
        uses: actions/checkout@v2
        with:
          persist-credentials: false

      - name: Generate Tag
        id: generate_tag
        run: |
.github/workflows/build-docker-image.yml (3 changes)

@@ -13,6 +13,9 @@ jobs:
    steps:
      - name: Checkout
        uses: actions/checkout@v2
        with:
          persist-credentials: false

      - name: judge has env GITHUB_REF  # if the GITHUB_REF environment variable is missing, assign github.ref to GITHUB_REF
        run: |
          if [ -z "$GITHUB_REF" ]; then
.github/workflows/build-release-artifacts.yaml (10 changes)

@@ -12,6 +12,8 @@ jobs:
    steps:
      - name: Checkout
        uses: actions/checkout@v2
        with:
          persist-credentials: false

      - name: Check version
        id: check_version

@@ -50,3 +52,11 @@ jobs:
        with:
          name: langbot-${{ steps.check_version.outputs.version }}-all
          path: .

      - name: Upload To Release
        env:
          GH_TOKEN: ${{ secrets.RELEASE_UPLOAD_GITHUB_TOKEN }}
        run: |
          # zip all files in this directory
          zip -r langbot-${{ steps.check_version.outputs.version }}-all.zip .
          gh release upload ${{ github.event.release.tag_name }} langbot-${{ steps.check_version.outputs.version }}-all.zip
.github/workflows/test-pr.yml (80 changes, file deleted)

@@ -1,80 +0,0 @@
name: Test Pull Request

on:
  pull_request:
    types: [ready_for_review]
    paths:
      # Any change to a .py file triggers this workflow
      - '**.py'
  pull_request_review:
    types: [submitted]
  issue_comment:
    types: [created]
  # Allow manual triggering
  workflow_dispatch:

jobs:
  perform-test:
    runs-on: ubuntu-latest
    # Also run when the event is pull_request_review and the review state is approved
    if: >
      github.event_name == 'pull_request' ||
      (github.event_name == 'pull_request_review' && github.event.review.state == 'APPROVED') ||
      github.event_name == 'workflow_dispatch' ||
      (github.event_name == 'issue_comment' && github.event.issue.pull_request != '' && contains(github.event.comment.body, '/test') && github.event.comment.user.login == 'RockChinQ')
    steps:
      # Check out the test harness repository
      - name: Checkout
        uses: actions/checkout@v2
        with:
          # Repository
          repository: RockChinQ/qcg-tester
          # Checkout path
          path: qcg-tester
      - name: Setup Python
        uses: actions/setup-python@v2
        with:
          python-version: '3.10'

      - name: Install dependencies
        run: |
          cd qcg-tester
          python -m pip install --upgrade pip
          pip install -r requirements.txt

      - name: Get PR details
        id: get-pr
        if: github.event_name == 'issue_comment'
        uses: octokit/request-action@v2.x
        with:
          route: GET /repos/${{ github.repository }}/pulls/${{ github.event.issue.number }}
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      - name: Set PR source branch as env variable
        if: github.event_name == 'issue_comment'
        run: |
          PR_SOURCE_BRANCH=$(echo '${{ steps.get-pr.outputs.data }}' | jq -r '.head.ref')
          echo "BRANCH=$PR_SOURCE_BRANCH" >> $GITHUB_ENV

      - name: Set PR Branch as bash env
        if: github.event_name != 'issue_comment'
        run: |
          echo "BRANCH=${{ github.head_ref }}" >> $GITHUB_ENV
      - name: Set OpenAI API Key from Secrets
        run: |
          echo "OPENAI_API_KEY=${{ secrets.OPENAI_API_KEY }}" >> $GITHUB_ENV
      - name: Set OpenAI Reverse Proxy URL from Secrets
        run: |
          echo "OPENAI_REVERSE_PROXY=${{ secrets.OPENAI_REVERSE_PROXY }}" >> $GITHUB_ENV
      - name: Run test
        run: |
          cd qcg-tester
          python main.py

      - name: Upload coverage reports to Codecov
        run: |
          cd qcg-tester/resource/QChatGPT
          curl -Os https://uploader.codecov.io/latest/linux/codecov
          chmod +x codecov
          ./codecov -t ${{ secrets.CODECOV_TOKEN }}
.gitignore (6 changes)

@@ -2,7 +2,6 @@
.idea/
__pycache__/
database.db
qchatgpt.log
langbot.log
/banlist.py
/plugins/

@@ -17,8 +16,7 @@ scenario/
!scenario/default-template.json
override.json
cookies.json
res/announcement_saved
res/announcement_saved.json
data/labels/announcement_saved.json
cmdpriv.json
tips.py
.venv

@@ -32,7 +30,7 @@ claude.json
bard.json
/*yaml
!/docker-compose.yaml
res/instance_id.json
data/labels/instance_id.json
.DS_Store
/data
botpy.log*
README.md (28 changes)

@@ -77,3 +77,31 @@
- WebUI Demo: https://demo.langbot.dev/
- Login: email `demo@langbot.app`, password `langbot123456`
- Note: the demo only showcases the WebUI; it is a public environment, so do not enter any sensitive information.

## 🔌 Component Compatibility

### Messaging Platforms

| Platform | Status | Notes |
| --- | --- | --- |
| OneBot v11 | ✅ | Private and group chat for personal QQ accounts |
| go-cqhttp | ✅ | Private and group chat for personal QQ accounts |
| QQ Official API | ✅ | QQ channel bots; supports channels, private chat, and group chat |
| WeCom (Enterprise WeChat) | 🚧 | |
| DingTalk | 🚧 | |

🚧: in development

### Large Language Models

| Model | Status | Notes |
| --- | --- | --- |
| [OpenAI](https://platform.openai.com/) | ✅ | Any model exposed through an OpenAI-compatible API |
| [DeepSeek](https://www.deepseek.com/) | ✅ | |
| [Moonshot](https://www.moonshot.cn/) | ✅ | |
| [Anthropic](https://www.anthropic.com/) | ✅ | |
| [xAI](https://x.ai/) | ✅ | |
| [Zhipu AI](https://open.bigmodel.cn/) | ✅ | |
| [Dify](https://dify.ai) | ✅ | LLMOps platform |
| [Ollama](https://ollama.com/) | ✅ | Local LLM management platform |
| [GiteeAI](https://ai.gitee.com/) | ✅ | LLM API aggregation platform |
main.py (4 changes)

@@ -49,12 +49,10 @@ async def main_entry(loop: asyncio.AbstractEventLoop):
    generated_files = await files.generate_files()

    if generated_files:
        print("以下文件不存在,已自动生成,请按需修改配置文件后重启:")
        print("以下文件不存在,已自动生成:")
        for file in generated_files:
            print("-", file)

        sys.exit(0)

    from pkg.core import boot
    await boot.main(loop)
@@ -12,7 +12,7 @@ from .. import group
class LogsRouterGroup(group.RouterGroup):

    async def initialize(self) -> None:
        @self.route('', methods=['GET'])
        @self.route('', methods=['GET'], auth_type=group.AuthType.USER_TOKEN)
        async def _() -> str:

            start_page_number = int(quart.request.args.get('start_page_number', 0))
@@ -13,7 +13,7 @@ from .. import group
class PluginsRouterGroup(group.RouterGroup):

    async def initialize(self) -> None:
        @self.route('', methods=['GET'])
        @self.route('', methods=['GET'], auth_type=group.AuthType.USER_TOKEN)
        async def _() -> str:
            plugins = self.ap.plugin_mgr.plugins()

@@ -23,14 +23,14 @@ class PluginsRouterGroup(group.RouterGroup):
                'plugins': plugins_data
            })

        @self.route('/<author>/<plugin_name>/toggle', methods=['PUT'])
        @self.route('/<author>/<plugin_name>/toggle', methods=['PUT'], auth_type=group.AuthType.USER_TOKEN)
        async def _(author: str, plugin_name: str) -> str:
            data = await quart.request.json
            target_enabled = data.get('target_enabled')
            await self.ap.plugin_mgr.update_plugin_switch(plugin_name, target_enabled)
            return self.success()

        @self.route('/<author>/<plugin_name>/update', methods=['POST'])
        @self.route('/<author>/<plugin_name>/update', methods=['POST'], auth_type=group.AuthType.USER_TOKEN)
        async def _(author: str, plugin_name: str) -> str:
            ctx = taskmgr.TaskContext.new()
            wrapper = self.ap.task_mgr.create_user_task(

@@ -44,7 +44,7 @@ class PluginsRouterGroup(group.RouterGroup):
                'task_id': wrapper.id
            })

        @self.route('/<author>/<plugin_name>', methods=['DELETE'])
        @self.route('/<author>/<plugin_name>', methods=['DELETE'], auth_type=group.AuthType.USER_TOKEN)
        async def _(author: str, plugin_name: str) -> str:
            ctx = taskmgr.TaskContext.new()
            wrapper = self.ap.task_mgr.create_user_task(

@@ -59,13 +59,13 @@ class PluginsRouterGroup(group.RouterGroup):
                'task_id': wrapper.id
            })

        @self.route('/reorder', methods=['PUT'])
        @self.route('/reorder', methods=['PUT'], auth_type=group.AuthType.USER_TOKEN)
        async def _() -> str:
            data = await quart.request.json
            await self.ap.plugin_mgr.reorder_plugins(data.get('plugins'))
            return self.success()

        @self.route('/install/github', methods=['POST'])
        @self.route('/install/github', methods=['POST'], auth_type=group.AuthType.USER_TOKEN)
        async def _() -> str:
            data = await quart.request.json
@@ -9,7 +9,7 @@ class SettingsRouterGroup(group.RouterGroup):

    async def initialize(self) -> None:

        @self.route('', methods=['GET'])
        @self.route('', methods=['GET'], auth_type=group.AuthType.USER_TOKEN)
        async def _() -> str:
            return self.success(
                data={

@@ -23,7 +23,7 @@ class SettingsRouterGroup(group.RouterGroup):
                }
            )

        @self.route('/<manager_name>', methods=['GET'])
        @self.route('/<manager_name>', methods=['GET'], auth_type=group.AuthType.USER_TOKEN)
        async def _(manager_name: str) -> str:

            manager = self.ap.settings_mgr.get_manager(manager_name)

@@ -44,7 +44,7 @@ class SettingsRouterGroup(group.RouterGroup):
                }
            )

        @self.route('/<manager_name>/data', methods=['PUT'])
        @self.route('/<manager_name>/data', methods=['PUT'], auth_type=group.AuthType.USER_TOKEN)
        async def _(manager_name: str) -> str:
            data = await quart.request.json
            manager = self.ap.settings_mgr.get_manager(manager_name)

@@ -9,7 +9,7 @@ from .. import group
class StatsRouterGroup(group.RouterGroup):

    async def initialize(self) -> None:
        @self.route('/basic', methods=['GET'])
        @self.route('/basic', methods=['GET'], auth_type=group.AuthType.USER_TOKEN)
        async def _() -> str:

            conv_count = 0
@@ -20,7 +20,7 @@ class SystemRouterGroup(group.RouterGroup):
                }
            )

        @self.route('/tasks', methods=['GET'])
        @self.route('/tasks', methods=['GET'], auth_type=group.AuthType.USER_TOKEN)
        async def _() -> str:
            task_type = quart.request.args.get("type")

@@ -31,7 +31,7 @@ class SystemRouterGroup(group.RouterGroup):
                data=self.ap.task_mgr.get_tasks_dict(task_type)
            )

        @self.route('/tasks/<task_id>', methods=['GET'])
        @self.route('/tasks/<task_id>', methods=['GET'], auth_type=group.AuthType.USER_TOKEN)
        async def _(task_id: str) -> str:
            task = self.ap.task_mgr.get_task_by_id(int(task_id))

@@ -40,7 +40,7 @@ class SystemRouterGroup(group.RouterGroup):

            return self.success(data=task.to_dict())

        @self.route('/reload', methods=['POST'])
        @self.route('/reload', methods=['POST'], auth_type=group.AuthType.USER_TOKEN)
        async def _() -> str:
            json_data = await quart.request.json

@@ -51,7 +51,7 @@ class SystemRouterGroup(group.RouterGroup):
            )
            return self.success()

        @self.route('/_debug/exec', methods=['POST'])
        @self.route('/_debug/exec', methods=['POST'], auth_type=group.AuthType.USER_TOKEN)
        async def _() -> str:
            if not constants.debug_mode:
                return self.http_status(403, 403, "Forbidden")
@@ -13,14 +13,14 @@ identifier = {
    'instance_create_ts': 0,
}

HOST_ID_FILE = os.path.expanduser('~/.qchatgpt/host_id.json')
INSTANCE_ID_FILE = 'res/instance_id.json'
HOST_ID_FILE = os.path.expanduser('~/.langbot/host_id.json')
INSTANCE_ID_FILE = 'data/labels/instance_id.json'

def init():
    global identifier

    if not os.path.exists(os.path.expanduser('~/.qchatgpt')):
        os.mkdir(os.path.expanduser('~/.qchatgpt'))
    if not os.path.exists(os.path.expanduser('~/.langbot')):
        os.mkdir(os.path.expanduser('~/.langbot'))

    if not os.path.exists(HOST_ID_FILE):
        new_host_id = 'host_'+str(uuid.uuid4())
@@ -197,5 +197,27 @@ class Application:

                await self.plugin_mgr.load_plugins()
                await self.plugin_mgr.initialize_plugins()
            case core_entities.LifecycleControlScope.PROVIDER.value:
                self.logger.info("执行热重载 scope="+scope)

                llm_model_mgr_inst = llm_model_mgr.ModelManager(self)
                await llm_model_mgr_inst.initialize()
                self.model_mgr = llm_model_mgr_inst

                llm_session_mgr_inst = llm_session_mgr.SessionManager(self)
                await llm_session_mgr_inst.initialize()
                self.sess_mgr = llm_session_mgr_inst

                llm_prompt_mgr_inst = llm_prompt_mgr.PromptManager(self)
                await llm_prompt_mgr_inst.initialize()
                self.prompt_mgr = llm_prompt_mgr_inst

                llm_tool_mgr_inst = llm_tool_mgr.ToolManager(self)
                await llm_tool_mgr_inst.initialize()
                self.tool_mgr = llm_tool_mgr_inst

                runner_mgr_inst = runnermgr.RunnerManager(self)
                await runner_mgr_inst.initialize()
                self.runner_mgr = runner_mgr_inst
            case _:
                pass
        pass
@@ -24,6 +24,7 @@ required_paths = [
    "data/scenario",
    "data/logs",
    "data/config",
    "data/labels",
    "plugins"
]
@@ -23,6 +23,7 @@ class LifecycleControlScope(enum.Enum):
    APPLICATION = "application"
    PLATFORM = "platform"
    PLUGIN = "plugin"
    PROVIDER = "provider"


class LauncherTypes(enum.Enum):
pkg/core/migrations/m018_xai_config.py (25 changes, new file)

@@ -0,0 +1,25 @@
from __future__ import annotations

from .. import migration


@migration.migration_class("xai-config", 18)
class XaiConfigMigration(migration.Migration):
    """Migration"""

    async def need_migrate(self) -> bool:
        """Determine whether this migration needs to run in the current environment"""
        return 'xai-chat-completions' not in self.ap.provider_cfg.data['requester']

    async def run(self):
        """Run the migration"""
        self.ap.provider_cfg.data['requester']['xai-chat-completions'] = {
            "base-url": "https://api.x.ai/v1",
            "args": {},
            "timeout": 120
        }
        self.ap.provider_cfg.data['keys']['xai'] = [
            "xai-1234567890"
        ]

        await self.ap.provider_cfg.dump_config()
pkg/core/migrations/m019_zhipuai_config.py (25 changes, new file)

@@ -0,0 +1,25 @@
from __future__ import annotations

from .. import migration


@migration.migration_class("zhipuai-config", 19)
class ZhipuaiConfigMigration(migration.Migration):
    """Migration"""

    async def need_migrate(self) -> bool:
        """Determine whether this migration needs to run in the current environment"""
        return 'zhipuai-chat-completions' not in self.ap.provider_cfg.data['requester']

    async def run(self):
        """Run the migration"""
        self.ap.provider_cfg.data['requester']['zhipuai-chat-completions'] = {
            "base-url": "https://open.bigmodel.cn/api/paas/v4",
            "args": {},
            "timeout": 120
        }
        self.ap.provider_cfg.data['keys']['zhipuai'] = [
            "xxxxxxx"
        ]

        await self.ap.provider_cfg.dump_config()
@@ -7,7 +7,7 @@ from .. import migration
from ..migrations import m001_sensitive_word_migration, m002_openai_config_migration, m003_anthropic_requester_cfg_completion, m004_moonshot_cfg_completion
from ..migrations import m005_deepseek_cfg_completion, m006_vision_config, m007_qcg_center_url, m008_ad_fixwin_config_migrate, m009_msg_truncator_cfg
from ..migrations import m010_ollama_requester_config, m011_command_prefix_config, m012_runner_config, m013_http_api_config, m014_force_delay_config
from ..migrations import m015_gitee_ai_config, m016_dify_service_api, m017_dify_api_timeout_params
from ..migrations import m015_gitee_ai_config, m016_dify_service_api, m017_dify_api_timeout_params, m018_xai_config, m019_zhipuai_config


@stage.stage_class("MigrationStage")
@@ -105,7 +105,7 @@ class ChatMessageHandler(handler.MessageHandler):
        await self.ap.ctr_mgr.usage.post_query_record(
            session_type=query.session.launcher_type.value,
            session_id=str(query.session.launcher_id),
            query_ability_provider="QChatGPT.Chat",
            query_ability_provider="LangBot.Chat",
            usage=text_length,
            model_name=query.use_model.name,
            response_seconds=int(time.time() - start_time),
@@ -50,17 +50,6 @@ class PlatformManager:
                adapter=adapter
            )

        async def on_stranger_message(event: platform_events.StrangerMessage, adapter: msadapter.MessageSourceAdapter):

            await self.ap.query_pool.add_query(
                launcher_type=core_entities.LauncherTypes.PERSON,
                launcher_id=event.sender.id,
                sender_id=event.sender.id,
                message_event=event,
                message_chain=event.message_chain,
                adapter=adapter
            )

        async def on_group_message(event: platform_events.GroupMessage, adapter: msadapter.MessageSourceAdapter):

            await self.ap.query_pool.add_query(

@@ -96,12 +85,6 @@ class PlatformManager:
            )
            self.adapters.append(adapter_inst)

            if adapter_name == 'yiri-mirai':
                adapter_inst.register_listener(
                    platform_events.StrangerMessage,
                    on_stranger_message
                )

            adapter_inst.register_listener(
                platform_events.FriendMessage,
                on_friend_message
@@ -60,7 +60,7 @@ class AiocqhttpMessageConverter(adapter.MessageConverter):
        elif type(msg) is forward.Forward:

            for node in msg.node_list:
                msg_list.extend(await AiocqhttpMessageConverter.yiri2target(node.message_chain)[0])
                msg_list.extend((await AiocqhttpMessageConverter.yiri2target(node.message_chain))[0])

        else:
            msg_list.append(aiocqhttp.MessageSegment.text(str(msg)))

@@ -244,7 +244,7 @@ class AiocqhttpAdapter(adapter.MessageSourceAdapter):
    async def send_message(
        self, target_type: str, target_id: str, message: platform_message.MessageChain
    ):
        aiocq_msg = await AiocqhttpMessageConverter.yiri2target(message)[0]
        aiocq_msg = (await AiocqhttpMessageConverter.yiri2target(message))[0]

        if target_type == "group":
            await self.bot.send_group_msg(group_id=int(target_id), message=aiocq_msg)
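The two aiocqhttp fixes above correct an operator-precedence bug: `await` binds more loosely than subscription, so `await f(x)[0]` subscripts the un-awaited coroutine object and fails at runtime. A minimal standalone sketch of the difference (the `pair` coroutine is hypothetical, not part of this change set):

```python
import asyncio

async def pair() -> tuple[int, int]:
    return (1, 2)

async def main():
    try:
        # Parsed as `await (pair()[0])`: subscripts the coroutine object itself.
        first = await pair()[0]
    except TypeError as e:
        print(e)  # 'coroutine' object is not subscriptable

    # Await first, then subscript the awaited result, as in the fixed lines.
    first = (await pair())[0]
    print(first)  # 1

asyncio.run(main())
```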
@@ -9,6 +9,7 @@ from . import events
from ..provider.tools import entities as tools_entities
from ..core import app
from ..platform.types import message as platform_message
from ..platform import adapter as platform_adapter


def register(

@@ -113,6 +114,37 @@ class APIHost:
    async def initialize(self):
        pass

    # ========== APIs callable by plugins (host application APIs) ==========

    def get_platform_adapters(self) -> list[platform_adapter.MessageSourceAdapter]:
        """Get the list of enabled message platform adapters

        Returns:
            list[platform.adapter.MessageSourceAdapter]: the enabled message platform adapters
        """
        return self.ap.platform_mgr.adapters

    async def send_active_message(
        self,
        adapter: platform_adapter.MessageSourceAdapter,
        target_type: str,
        target_id: str,
        message: platform_message.MessageChain,
    ):
        """Send a proactive message

        Args:
            adapter (platform.adapter.MessageSourceAdapter): message platform adapter object; call host.get_platform_adapters() and pick one from the returned list
            target_type (str): target type, `person` or `group`
            target_id (str): target ID
            message (platform.types.MessageChain): message chain
        """
        await adapter.send_message(
            target_type=target_type,
            target_id=target_id,
            message=message,
        )

    def require_ver(
        self,
        ge: str,

@@ -127,16 +159,16 @@ class APIHost:

        Returns:
            bool: whether the requirement is met; False means the version could not be determined, True means it is met, and an exception is raised when it is not met
        """
        qchatgpt_version = ""
        langbot_version = ""

        try:
            qchatgpt_version = self.ap.ver_mgr.get_current_version()  # get the version number from the updater module
            langbot_version = self.ap.ver_mgr.get_current_version()  # get the version number from the updater module
        except:
            return False

        if self.ap.ver_mgr.compare_version_str(qchatgpt_version, ge) < 0 or \
            (self.ap.ver_mgr.compare_version_str(qchatgpt_version, le) > 0):
            raise Exception("LangBot 版本不满足要求,某些功能(可能是由插件提供的)无法正常使用。(要求版本:{}-{},但当前版本:{})".format(ge, le, qchatgpt_version))
        if self.ap.ver_mgr.compare_version_str(langbot_version, ge) < 0 or \
            (self.ap.ver_mgr.compare_version_str(langbot_version, le) > 0):
            raise Exception("LangBot 版本不满足要求,某些功能(可能是由插件提供的)无法正常使用。(要求版本:{}-{},但当前版本:{})".format(ge, le, langbot_version))

        return True
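The new `get_platform_adapters` / `send_active_message` host APIs let a plugin push a message without waiting for an incoming event. A rough usage sketch (the surrounding plugin code, the target ID, and the use of `platform_message.Plain` for a text segment are assumptions for illustration, not part of this diff):

```python
from pkg.platform.types import message as platform_message

# Assumes plugin code that holds an APIHost instance, e.g. self.host.
async def broadcast_notice(host, text: str):
    adapters = host.get_platform_adapters()  # newly added host API
    if not adapters:
        return

    await host.send_active_message(
        adapter=adapters[0],          # pick one enabled adapter
        target_type="group",          # `person` or `group`, per the docstring
        target_id="123456789",        # hypothetical target ID
        message=platform_message.MessageChain(
            [platform_message.Plain(text)]  # Plain segment assumed to exist in the platform types
        ),
    )
```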
@@ -6,7 +6,7 @@ from . import entities, requester
from ...core import app

from . import token
from .requesters import chatcmpl, anthropicmsgs, moonshotchatcmpl, deepseekchatcmpl, ollamachat, giteeaichatcmpl
from .requesters import chatcmpl, anthropicmsgs, moonshotchatcmpl, deepseekchatcmpl, ollamachat, giteeaichatcmpl, xaichatcmpl, zhipuaichatcmpl

FETCH_MODEL_LIST_URL = "https://api.qchatgpt.rockchin.top/api/v2/fetch/model_list"
@@ -124,7 +124,7 @@ class OpenAIChatCompletions(requester.LLMAPIRequester):
                req_messages.append(msg_dict)

        try:
            return await self._closure(query, req_messages, model, funcs)
            return await self._closure(query=query, req_messages=req_messages, use_model=model, use_funcs=funcs)
        except asyncio.TimeoutError:
            raise errors.RequesterError('请求超时')
        except openai.BadRequestError as e:
@@ -1,7 +1,5 @@
from __future__ import annotations

from ....core import app

from . import chatcmpl
from .. import entities, errors, requester
from ....core import entities as core_entities, app

@@ -19,6 +17,7 @@ class DeepseekChatCompletions(chatcmpl.OpenAIChatCompletions):

    async def _closure(
        self,
        query: core_entities.Query,
        req_messages: list[dict],
        use_model: entities.LLMModelInfo,
        use_funcs: list[tools_entities.LLMFunction] = None,

@@ -8,7 +8,7 @@ import typing

from . import chatcmpl
from .. import entities, errors, requester
from ....core import app
from ....core import app, entities as core_entities
from ... import entities as llm_entities
from ...tools import entities as tools_entities
from .. import entities as modelmgr_entities

@@ -24,6 +24,7 @@ class GiteeAIChatCompletions(chatcmpl.OpenAIChatCompletions):

    async def _closure(
        self,
        query: core_entities.Query,
        req_messages: list[dict],
        use_model: entities.LLMModelInfo,
        use_funcs: list[tools_entities.LLMFunction] = None,

@@ -19,6 +19,7 @@ class MoonshotChatCompletions(chatcmpl.OpenAIChatCompletions):

    async def _closure(
        self,
        query: core_entities.Query,
        req_messages: list[dict],
        use_model: entities.LLMModelInfo,
        use_funcs: list[tools_entities.LLMFunction] = None,
pkg/provider/modelmgr/requesters/xaichatcmpl.py (21 changes, new file)

@@ -0,0 +1,21 @@
from __future__ import annotations

import openai

from . import chatcmpl
from .. import requester
from ....core import app


@requester.requester_class("xai-chat-completions")
class XaiChatCompletions(chatcmpl.OpenAIChatCompletions):
    """xAI ChatCompletion API requester"""

    client: openai.AsyncClient

    requester_cfg: dict

    def __init__(self, ap: app.Application):
        self.ap = ap

        self.requester_cfg = self.ap.provider_cfg.data['requester']['xai-chat-completions']
pkg/provider/modelmgr/requesters/zhipuaichatcmpl.py (21 changes, new file)

@@ -0,0 +1,21 @@
from __future__ import annotations

import openai

from ....core import app
from . import chatcmpl
from .. import requester


@requester.requester_class("zhipuai-chat-completions")
class ZhipuAIChatCompletions(chatcmpl.OpenAIChatCompletions):
    """Zhipu AI ChatCompletion API requester"""

    client: openai.AsyncClient

    requester_cfg: dict

    def __init__(self, ap: app.Application):
        self.ap = ap

        self.requester_cfg = self.ap.provider_cfg.data['requester']['zhipuai-chat-completions']
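Both new requesters follow the same recipe: subclass `OpenAIChatCompletions`, register a requester name, and point it at an OpenAI-compatible `base-url` entry in the provider config. A hedged sketch of what a requester for another OpenAI-compatible endpoint would look like under the same pattern (the `acme-chat-completions` name and its config entry are purely illustrative, not part of this change set):

```python
from __future__ import annotations

import openai

from . import chatcmpl
from .. import requester
from ....core import app


@requester.requester_class("acme-chat-completions")  # illustrative name only
class AcmeChatCompletions(chatcmpl.OpenAIChatCompletions):
    """Requester for a hypothetical OpenAI-compatible endpoint"""

    client: openai.AsyncClient

    requester_cfg: dict

    def __init__(self, ap: app.Application):
        self.ap = ap
        # Reads base-url / args / timeout from provider.json, like the xAI and Zhipu AI requesters above.
        self.requester_cfg = self.ap.provider_cfg.data['requester']['acme-chat-completions']
```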
@@ -5,6 +5,8 @@ import json
import uuid
import base64

import aiohttp

from .. import runner
from ...core import entities as core_entities
from .. import entities as llm_entities

@@ -97,7 +99,7 @@ class DifyServiceAPIRunner(runner.RequestRunner):
            files=files,
            timeout=self.ap.provider_cfg.data["dify-service-api"]["chat"]["timeout"],
        ):
            self.ap.logger.debug("dify-chat-chunk: ", chunk)
            self.ap.logger.debug("dify-chat-chunk: " + str(chunk))

            if chunk['event'] == 'workflow_started':
                mode = "workflow"

@@ -149,7 +151,8 @@ class DifyServiceAPIRunner(runner.RequestRunner):
            files=files,
            timeout=self.ap.provider_cfg.data["dify-service-api"]["chat"]["timeout"],
        ):
            self.ap.logger.debug("dify-agent-chunk: ", chunk)
            self.ap.logger.debug("dify-agent-chunk: " + str(chunk))

            if chunk["event"] in ignored_events:
                continue
            if chunk["event"] == "agent_thought":

@@ -179,6 +182,21 @@ class DifyServiceAPIRunner(runner.RequestRunner):
                    ],
                )
                yield msg
            if chunk['event'] == 'message_file':

                if chunk['type'] == 'image' and chunk['belongs_to'] == 'assistant':

                    base_url = self.dify_client.base_url

                    if base_url.endswith('/v1'):
                        base_url = base_url[:-3]

                    image_url = base_url + chunk['url']

                    yield llm_entities.Message(
                        role="assistant",
                        content=[llm_entities.ContentElement.from_image_url(image_url)],
                    )

        query.session.using_conversation.uuid = chunk["conversation_id"]

@@ -215,7 +233,7 @@ class DifyServiceAPIRunner(runner.RequestRunner):
            files=files,
            timeout=self.ap.provider_cfg.data["dify-service-api"]["workflow"]["timeout"],
        ):
            self.ap.logger.debug("dify-workflow-chunk: ", chunk)
            self.ap.logger.debug("dify-workflow-chunk: " + str(chunk))
            if chunk["event"] in ignored_events:
                continue
@@ -62,11 +62,11 @@ class AnnouncementManager:
    async def fetch_saved(
        self
    ) -> list[Announcement]:
        if not os.path.exists("res/announcement_saved.json"):
            with open("res/announcement_saved.json", "w", encoding="utf-8") as f:
        if not os.path.exists("data/labels/announcement_saved.json"):
            with open("data/labels/announcement_saved.json", "w", encoding="utf-8") as f:
                f.write("[]")

        with open("res/announcement_saved.json", "r", encoding="utf-8") as f:
        with open("data/labels/announcement_saved.json", "r", encoding="utf-8") as f:
            content = f.read()

        if not content:

@@ -79,7 +79,7 @@ class AnnouncementManager:
        content: list[Announcement]
    ):

        with open("res/announcement_saved.json", "w", encoding="utf-8") as f:
        with open("data/labels/announcement_saved.json", "w", encoding="utf-8") as f:
            f.write(json.dumps([
                item.to_dict() for item in content
            ], indent=4, ensure_ascii=False))
@@ -1,4 +1,4 @@
semantic_version = "v3.4.1.3"
semantic_version = "v3.4.2.1"

debug_mode = False
@@ -115,6 +115,97 @@
            "name": "deepseek-coder",
            "requester": "deepseek-chat-completions",
            "token_mgr": "deepseek"
        },
        {
            "name": "grok-2-latest",
            "requester": "xai-chat-completions",
            "token_mgr": "xai"
        },
        {
            "name": "grok-2",
            "requester": "xai-chat-completions",
            "token_mgr": "xai"
        },
        {
            "name": "grok-2-vision-1212",
            "requester": "xai-chat-completions",
            "token_mgr": "xai",
            "vision_supported": true
        },
        {
            "name": "grok-2-1212",
            "requester": "xai-chat-completions",
            "token_mgr": "xai"
        },
        {
            "name": "grok-vision-beta",
            "requester": "xai-chat-completions",
            "token_mgr": "xai",
            "vision_supported": true
        },
        {
            "name": "grok-beta",
            "requester": "xai-chat-completions",
            "token_mgr": "xai"
        },
        {
            "name": "glm-4-plus",
            "requester": "zhipuai-chat-completions",
            "token_mgr": "zhipuai"
        },
        {
            "name": "glm-4-0520",
            "requester": "zhipuai-chat-completions",
            "token_mgr": "zhipuai"
        },
        {
            "name": "glm-4-air",
            "requester": "zhipuai-chat-completions",
            "token_mgr": "zhipuai"
        },
        {
            "name": "glm-4-airx",
            "requester": "zhipuai-chat-completions",
            "token_mgr": "zhipuai"
        },
        {
            "name": "glm-4-long",
            "requester": "zhipuai-chat-completions",
            "token_mgr": "zhipuai"
        },
        {
            "name": "glm-4-flashx",
            "requester": "zhipuai-chat-completions",
            "token_mgr": "zhipuai"
        },
        {
            "name": "glm-4-flash",
            "requester": "zhipuai-chat-completions",
            "token_mgr": "zhipuai"
        },
        {
            "name": "glm-4v-plus",
            "requester": "zhipuai-chat-completions",
            "token_mgr": "zhipuai",
            "vision_supported": true
        },
        {
            "name": "glm-4v",
            "requester": "zhipuai-chat-completions",
            "token_mgr": "zhipuai",
            "vision_supported": true
        },
        {
            "name": "glm-4v-flash",
            "requester": "zhipuai-chat-completions",
            "token_mgr": "zhipuai",
            "vision_supported": true
        },
        {
            "name": "glm-zero-preview",
            "requester": "zhipuai-chat-completions",
            "token_mgr": "zhipuai",
            "vision_supported": true
        }
    ]
}
@@ -16,6 +16,12 @@
        ],
        "gitee-ai": [
            "XXXXX"
        ],
        "xai": [
            "xai-1234567890"
        ],
        "zhipuai": [
            "xxxxxxx"
        ]
    },
    "requester": {

@@ -50,6 +56,16 @@
            "base-url": "https://ai.gitee.com/v1",
            "args": {},
            "timeout": 120
        },
        "xai-chat-completions": {
            "base-url": "https://api.x.ai/v1",
            "args": {},
            "timeout": 120
        },
        "zhipuai-chat-completions": {
            "base-url": "https://open.bigmodel.cn/api/paas/v4",
            "args": {},
            "timeout": 120
        }
    },
    "model": "gpt-4o",
@@ -22,7 +22,6 @@
        "openai": {
            "type": "array",
            "title": "OpenAI API 密钥",
            "description": "OpenAI API 密钥",
            "items": {
                "type": "string"
            },

@@ -31,7 +30,6 @@
        "anthropic": {
            "type": "array",
            "title": "Anthropic API 密钥",
            "description": "Anthropic API 密钥",
            "items": {
                "type": "string"
            },

@@ -40,7 +38,6 @@
        "moonshot": {
            "type": "array",
            "title": "Moonshot API 密钥",
            "description": "Moonshot API 密钥",
            "items": {
                "type": "string"
            },

@@ -49,7 +46,6 @@
        "deepseek": {
            "type": "array",
            "title": "DeepSeek API 密钥",
            "description": "DeepSeek API 密钥",
            "items": {
                "type": "string"
            },

@@ -57,8 +53,23 @@
        },
        "gitee": {
            "type": "array",
            "title": "Gitee API 密钥",
            "description": "Gitee API 密钥",
            "title": "Gitee AI API 密钥",
            "items": {
                "type": "string"
            },
            "default": []
        },
        "xai": {
            "type": "array",
            "title": "xAI API 密钥",
            "items": {
                "type": "string"
            },
            "default": []
        },
        "zhipuai": {
            "type": "array",
            "title": "智谱AI API 密钥",
            "items": {
                "type": "string"
            },

@@ -188,6 +199,42 @@
                "default": 120
            }
        }
    },
    "xai-chat-completions": {
        "type": "object",
        "title": "xAI API 请求配置",
        "description": "仅可编辑 URL 和 超时时间,额外请求参数不支持可视化编辑,请到编辑器编辑",
        "properties": {
            "base-url": {
                "type": "string",
                "title": "API URL"
            },
            "args": {
                "type": "object"
            },
            "timeout": {
                "type": "number",
                "title": "API 请求超时时间",
                "default": 120
            }
        }
    },
    "zhipuai-chat-completions": {
        "type": "object",
        "title": "智谱AI API 请求配置",
        "description": "仅可编辑 URL 和 超时时间,额外请求参数不支持可视化编辑,请到编辑器编辑",
        "properties": {
            "base-url": {
                "type": "string",
                "title": "API URL"
            },
            "args": {
                "type": "object"
            },
            "timeout": {
                "type": "number"
            }
        }
    }
}
},
@@ -79,6 +79,12 @@
                重载插件
              </v-list-item-title>
            </v-list-item>

            <v-list-item @click="reload('provider')">
              <v-list-item-title>
                重载 LLM 管理器
              </v-list-item-title>
            </v-list-item>
          </v-list>
        </v-menu>
      </v-list-item>

@@ -169,7 +175,8 @@ function openDocs() {

const reloadScopeLabel = {
  'platform': "消息平台",
  'plugin': "插件"
  'plugin': "插件",
  'provider': "LLM 管理器"
}

function reload(scope) {