Compare commits


15 Commits

Author SHA1 Message Date
Junyan Qin
9b8c5a3499 chore: release v3.4.2.1 2025-01-06 21:32:42 +08:00
Junyan Qin
53dde0607d Merge pull request #972 from RockChinQ/fix/dify-back-image
fix(dify): display agent image
2025-01-06 21:29:26 +08:00
Junyan Qin
7f034b4ffa fix(dify): display agent image 2025-01-06 21:28:36 +08:00
Junyan Qin
599ab83100 doc(README): perf llm comments 2025-01-06 20:33:35 +08:00
Junyan Qin
f4a3508ec2 Merge pull request #971 from RockChinQ/feat/zhipuai
feat: add supports for zhipuai(chatglm)
2025-01-06 20:29:26 +08:00
Junyan Qin
44b92909eb feat: add supports for zhipuai(chatglm) 2025-01-06 20:27:10 +08:00
Junyan Qin
8ed07b8d1a feat: add langbot scope plugin api 2025-01-06 19:49:32 +08:00
Junyan Qin
2ff9ced15e doc(README): add go-cqhttp 2025-01-06 09:53:56 +08:00
Junyan Qin
641b8d71ed doc(README): add compability comment 2025-01-06 09:51:40 +08:00
Junyan Qin
a31b450f54 chore: release v3.4.2 2025-01-04 23:07:52 +08:00
Junyan Qin
97bb24c5b9 feat: supports for provider reloading 2025-01-04 23:07:10 +08:00
Junyan Qin
5e5a3639d1 Merge pull request #958 from zhihuanwang/master
Add xAI model support
2025-01-04 22:25:51 +08:00
Junyan Qin
0a68a77e28 feat: refactor 2025-01-04 22:24:05 +08:00
kevin
11a0c4142e Add xAI model support
Recommended addition to llm-models.json:
```json
{
    "name": "grok-2-vision-1212",
    "model_name": "grok-2-vision-1212",
    "requester": "grok-chat-completions",
    "token_mgr": "grok",
    "vision_supported": true
}
```
Addition to the `requester` section of provider.json:
```json
"grok-chat-completions": {
    "args": {},
    "base-url": "https://api.x.ai/v1",
    "timeout": 120
}
```
Addition to `keys`:
```json
"grok": [
    "xai-your-key"
]
```
2025-01-04 22:13:47 +08:00
Junyan Qin
d214d80579 Update README.md 2025-01-04 11:11:57 +08:00
21 changed files with 383 additions and 34 deletions

View File

@@ -1,8 +1,3 @@
> [!IMPORTANT]
> 我们被人在 X.com 和 pump.fun 上冒充了,以下两个账号利用本项目和作者信息在 X.com 上发布数字货币营销信息,请勿相信!我们已向 X 官方举报!我们从未以 LangBot 名义创建任何社交媒体账号或者数字货币。
> We have been impersonated on X.com and pump.fun. The following two accounts are using this project's and its author's information to post cryptocurrency marketing on X.com. Please do not trust them! We have reported them to X officials. We have never created any social media accounts or digital currencies under the LangBot name.
> 1. https://x.com/RockChinQ
> 2. https://x.com/LangBotAI
<p align="center">
<img src="https://docs.langbot.app/social.png" alt="LangBot"/>
@@ -82,3 +77,31 @@
- WebUI Demo: https://demo.langbot.dev/
- Login info: email `demo@langbot.app`, password `langbot123456`
- Note: this is a public demo environment that only showcases the WebUI; do not enter any sensitive information in it.
## 🔌 Component Compatibility
### Messaging Platforms
| Platform | Status | Notes |
| --- | --- | --- |
| OneBot v11 | ✅ | QQ personal accounts, private and group chats |
| go-cqhttp | ✅ | QQ personal accounts, private and group chats |
| QQ Official API | ✅ | QQ guild bots; supports guilds, private chats, and group chats |
| WeCom (企业微信) | 🚧 | |
| DingTalk (钉钉) | 🚧 | |
🚧: In development
### LLMs
| Model | Status | Notes |
| --- | --- | --- |
| [OpenAI](https://platform.openai.com/) | ✅ | Works with any model behind an OpenAI-compatible API |
| [DeepSeek](https://www.deepseek.com/) | ✅ | |
| [Moonshot](https://www.moonshot.cn/) | ✅ | |
| [Anthropic](https://www.anthropic.com/) | ✅ | |
| [xAI](https://x.ai/) | ✅ | |
| [Zhipu AI (智谱AI)](https://open.bigmodel.cn/) | ✅ | |
| [Dify](https://dify.ai) | ✅ | LLMOps platform |
| [Ollama](https://ollama.com/) | ✅ | Local LLM management platform |
| [GiteeAI](https://ai.gitee.com/) | ✅ | LLM API aggregation platform |

View File

@@ -12,7 +12,7 @@ from .. import group
class LogsRouterGroup(group.RouterGroup):
async def initialize(self) -> None:
@self.route('', methods=['GET'])
@self.route('', methods=['GET'], auth_type=group.AuthType.USER_TOKEN)
async def _() -> str:
start_page_number = int(quart.request.args.get('start_page_number', 0))

View File

@@ -13,7 +13,7 @@ from .. import group
class PluginsRouterGroup(group.RouterGroup):
async def initialize(self) -> None:
@self.route('', methods=['GET'])
@self.route('', methods=['GET'], auth_type=group.AuthType.USER_TOKEN)
async def _() -> str:
plugins = self.ap.plugin_mgr.plugins()
@@ -23,14 +23,14 @@ class PluginsRouterGroup(group.RouterGroup):
'plugins': plugins_data
})
@self.route('/<author>/<plugin_name>/toggle', methods=['PUT'])
@self.route('/<author>/<plugin_name>/toggle', methods=['PUT'], auth_type=group.AuthType.USER_TOKEN)
async def _(author: str, plugin_name: str) -> str:
data = await quart.request.json
target_enabled = data.get('target_enabled')
await self.ap.plugin_mgr.update_plugin_switch(plugin_name, target_enabled)
return self.success()
@self.route('/<author>/<plugin_name>/update', methods=['POST'])
@self.route('/<author>/<plugin_name>/update', methods=['POST'], auth_type=group.AuthType.USER_TOKEN)
async def _(author: str, plugin_name: str) -> str:
ctx = taskmgr.TaskContext.new()
wrapper = self.ap.task_mgr.create_user_task(
@@ -44,7 +44,7 @@ class PluginsRouterGroup(group.RouterGroup):
'task_id': wrapper.id
})
@self.route('/<author>/<plugin_name>', methods=['DELETE'])
@self.route('/<author>/<plugin_name>', methods=['DELETE'], auth_type=group.AuthType.USER_TOKEN)
async def _(author: str, plugin_name: str) -> str:
ctx = taskmgr.TaskContext.new()
wrapper = self.ap.task_mgr.create_user_task(
@@ -59,13 +59,13 @@ class PluginsRouterGroup(group.RouterGroup):
'task_id': wrapper.id
})
@self.route('/reorder', methods=['PUT'])
@self.route('/reorder', methods=['PUT'], auth_type=group.AuthType.USER_TOKEN)
async def _() -> str:
data = await quart.request.json
await self.ap.plugin_mgr.reorder_plugins(data.get('plugins'))
return self.success()
@self.route('/install/github', methods=['POST'])
@self.route('/install/github', methods=['POST'], auth_type=group.AuthType.USER_TOKEN)
async def _() -> str:
data = await quart.request.json

View File

@@ -9,7 +9,7 @@ class SettingsRouterGroup(group.RouterGroup):
async def initialize(self) -> None:
@self.route('', methods=['GET'])
@self.route('', methods=['GET'], auth_type=group.AuthType.USER_TOKEN)
async def _() -> str:
return self.success(
data={
@@ -23,7 +23,7 @@ class SettingsRouterGroup(group.RouterGroup):
}
)
@self.route('/<manager_name>', methods=['GET'])
@self.route('/<manager_name>', methods=['GET'], auth_type=group.AuthType.USER_TOKEN)
async def _(manager_name: str) -> str:
manager = self.ap.settings_mgr.get_manager(manager_name)
@@ -44,7 +44,7 @@ class SettingsRouterGroup(group.RouterGroup):
}
)
@self.route('/<manager_name>/data', methods=['PUT'])
@self.route('/<manager_name>/data', methods=['PUT'], auth_type=group.AuthType.USER_TOKEN)
async def _(manager_name: str) -> str:
data = await quart.request.json
manager = self.ap.settings_mgr.get_manager(manager_name)

View File

@@ -9,7 +9,7 @@ from .. import group
class StatsRouterGroup(group.RouterGroup):
async def initialize(self) -> None:
@self.route('/basic', methods=['GET'])
@self.route('/basic', methods=['GET'], auth_type=group.AuthType.USER_TOKEN)
async def _() -> str:
conv_count = 0

View File

@@ -20,7 +20,7 @@ class SystemRouterGroup(group.RouterGroup):
}
)
@self.route('/tasks', methods=['GET'])
@self.route('/tasks', methods=['GET'], auth_type=group.AuthType.USER_TOKEN)
async def _() -> str:
task_type = quart.request.args.get("type")
@@ -31,7 +31,7 @@ class SystemRouterGroup(group.RouterGroup):
data=self.ap.task_mgr.get_tasks_dict(task_type)
)
@self.route('/tasks/<task_id>', methods=['GET'])
@self.route('/tasks/<task_id>', methods=['GET'], auth_type=group.AuthType.USER_TOKEN)
async def _(task_id: str) -> str:
task = self.ap.task_mgr.get_task_by_id(int(task_id))
@@ -40,7 +40,7 @@ class SystemRouterGroup(group.RouterGroup):
return self.success(data=task.to_dict())
@self.route('/reload', methods=['POST'])
@self.route('/reload', methods=['POST'], auth_type=group.AuthType.USER_TOKEN)
async def _() -> str:
json_data = await quart.request.json
@@ -51,7 +51,7 @@ class SystemRouterGroup(group.RouterGroup):
)
return self.success()
@self.route('/_debug/exec', methods=['POST'])
@self.route('/_debug/exec', methods=['POST'], auth_type=group.AuthType.USER_TOKEN)
async def _() -> str:
if not constants.debug_mode:
return self.http_status(403, 403, "Forbidden")
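These changes add `auth_type=group.AuthType.USER_TOKEN` to the logs, plugins, settings, stats, and system route groups, so their endpoints now require a logged-in user's token. A minimal sketch of calling one of the protected endpoints from Python; the `/api/v1` prefix, the port, and the `Authorization: Bearer` header format are assumptions, not something this diff confirms:

```python
import requests

BASE_URL = "http://localhost:5300"  # assumed LangBot HTTP API address
USER_TOKEN = "your-user-token"      # assumed: token obtained after logging in to the WebUI

# Hypothetical call to the now-protected plugin listing endpoint.
resp = requests.get(
    f"{BASE_URL}/api/v1/plugins",                        # path prefix is an assumption
    headers={"Authorization": f"Bearer {USER_TOKEN}"},   # header format is an assumption
    timeout=10,
)
resp.raise_for_status()
print(resp.json())  # the route returns self.success({'plugins': ...})
```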

View File

@@ -197,5 +197,27 @@ class Application:
await self.plugin_mgr.load_plugins()
await self.plugin_mgr.initialize_plugins()
case core_entities.LifecycleControlScope.PROVIDER.value:
self.logger.info("执行热重载 scope="+scope)
llm_model_mgr_inst = llm_model_mgr.ModelManager(self)
await llm_model_mgr_inst.initialize()
self.model_mgr = llm_model_mgr_inst
llm_session_mgr_inst = llm_session_mgr.SessionManager(self)
await llm_session_mgr_inst.initialize()
self.sess_mgr = llm_session_mgr_inst
llm_prompt_mgr_inst = llm_prompt_mgr.PromptManager(self)
await llm_prompt_mgr_inst.initialize()
self.prompt_mgr = llm_prompt_mgr_inst
llm_tool_mgr_inst = llm_tool_mgr.ToolManager(self)
await llm_tool_mgr_inst.initialize()
self.tool_mgr = llm_tool_mgr_inst
runner_mgr_inst = runnermgr.RunnerManager(self)
await runner_mgr_inst.initialize()
self.runner_mgr = runner_mgr_inst
case _:
pass
pass
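The new `PROVIDER` branch rebuilds the model, session, prompt, tool, and runner managers in place; this is what the new "重载 LLM 管理器" menu item in the frontend diff at the end triggers. A hedged sketch of invoking it through the `/reload` route shown above, reusing the same base-URL and token assumptions; the `"scope"` key name in the request body is also an assumption:

```python
import requests

BASE_URL = "http://localhost:5300"  # assumed
USER_TOKEN = "your-user-token"      # assumed

# "provider" is core_entities.LifecycleControlScope.PROVIDER.value from the enum diff below.
resp = requests.post(
    f"{BASE_URL}/api/v1/system/reload",                  # path prefix is an assumption
    headers={"Authorization": f"Bearer {USER_TOKEN}"},   # header format is an assumption
    json={"scope": "provider"},                          # body key name is an assumption
    timeout=30,
)
resp.raise_for_status()
```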

View File

@@ -23,6 +23,7 @@ class LifecycleControlScope(enum.Enum):
APPLICATION = "application"
PLATFORM = "platform"
PLUGIN = "plugin"
PROVIDER = "provider"
class LauncherTypes(enum.Enum):

View File

@@ -0,0 +1,25 @@
from __future__ import annotations
from .. import migration
@migration.migration_class("xai-config", 18)
class XaiConfigMigration(migration.Migration):
"""迁移"""
async def need_migrate(self) -> bool:
"""判断当前环境是否需要运行此迁移"""
return 'xai-chat-completions' not in self.ap.provider_cfg.data['requester']
async def run(self):
"""执行迁移"""
self.ap.provider_cfg.data['requester']['xai-chat-completions'] = {
"base-url": "https://api.x.ai/v1",
"args": {},
"timeout": 120
}
self.ap.provider_cfg.data['keys']['xai'] = [
"xai-1234567890"
]
await self.ap.provider_cfg.dump_config()

View File

@@ -0,0 +1,25 @@
from __future__ import annotations
from .. import migration
@migration.migration_class("zhipuai-config", 19)
class ZhipuaiConfigMigration(migration.Migration):
"""迁移"""
async def need_migrate(self) -> bool:
"""判断当前环境是否需要运行此迁移"""
return 'zhipuai-chat-completions' not in self.ap.provider_cfg.data['requester']
async def run(self):
"""执行迁移"""
self.ap.provider_cfg.data['requester']['zhipuai-chat-completions'] = {
"base-url": "https://open.bigmodel.cn/api/paas/v4",
"args": {},
"timeout": 120
}
self.ap.provider_cfg.data['keys']['zhipuai'] = [
"xxxxxxx"
]
await self.ap.provider_cfg.dump_config()
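Both migrations follow the same pattern: `need_migrate` returns True only when the requester entry is missing, and `run` writes a default requester block plus a placeholder key before persisting the config. A hypothetical follow-up migration for some other OpenAI-compatible provider, sketched on the same pattern (the migration name, sequence number, provider name, and URL are illustrative only):

```python
from __future__ import annotations

from .. import migration


@migration.migration_class("example-config", 20)  # hypothetical name and sequence number
class ExampleConfigMigration(migration.Migration):
    """Completes provider.json entries for a hypothetical "example" provider."""

    async def need_migrate(self) -> bool:
        """Run only if the requester entry does not exist yet."""
        return 'example-chat-completions' not in self.ap.provider_cfg.data['requester']

    async def run(self):
        """Write a default requester block and a placeholder key, then persist."""
        self.ap.provider_cfg.data['requester']['example-chat-completions'] = {
            "base-url": "https://api.example.com/v1",
            "args": {},
            "timeout": 120
        }
        self.ap.provider_cfg.data['keys']['example'] = [
            "sk-placeholder"
        ]
        await self.ap.provider_cfg.dump_config()
```

As the next diff shows for `m018_xai_config` and `m019_zhipuai_config`, such a module would also have to be added to the import list in the migration stage.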

View File

@@ -7,7 +7,7 @@ from .. import migration
from ..migrations import m001_sensitive_word_migration, m002_openai_config_migration, m003_anthropic_requester_cfg_completion, m004_moonshot_cfg_completion
from ..migrations import m005_deepseek_cfg_completion, m006_vision_config, m007_qcg_center_url, m008_ad_fixwin_config_migrate, m009_msg_truncator_cfg
from ..migrations import m010_ollama_requester_config, m011_command_prefix_config, m012_runner_config, m013_http_api_config, m014_force_delay_config
from ..migrations import m015_gitee_ai_config, m016_dify_service_api, m017_dify_api_timeout_params
from ..migrations import m015_gitee_ai_config, m016_dify_service_api, m017_dify_api_timeout_params, m018_xai_config, m019_zhipuai_config
@stage.stage_class("MigrationStage")

View File

@@ -9,6 +9,7 @@ from . import events
from ..provider.tools import entities as tools_entities
from ..core import app
from ..platform.types import message as platform_message
from ..platform import adapter as platform_adapter
def register(
@@ -113,6 +114,37 @@ class APIHost:
async def initialize(self):
pass
# ========== 插件可调用的 API主程序API ==========
def get_platform_adapters(self) -> list[platform_adapter.MessageSourceAdapter]:
"""获取已启用的消息平台适配器列表
Returns:
list[platform.adapter.MessageSourceAdapter]: 已启用的消息平台适配器列表
"""
return self.ap.platform_mgr.adapters
async def send_active_message(
self,
adapter: platform_adapter.MessageSourceAdapter,
target_type: str,
target_id: str,
message: platform_message.MessageChain,
):
"""发送主动消息
Args:
adapter (platform.adapter.MessageSourceAdapter): 消息平台适配器对象,调用 host.get_platform_adapters() 获取并取用其中某个
target_type (str): 目标类型,`person`或`group`
target_id (str): 目标ID
message (platform.types.MessageChain): 消息链
"""
await adapter.send_message(
target_type=target_type,
target_id=target_id,
message=message,
)
def require_ver(
self,
ge: str,
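The two new host methods let a plugin send messages proactively instead of only replying to inbound events. A hedged sketch of plugin-side usage; it assumes the plugin holds the `APIHost` instance as `self.host` and that `Plain`/`MessageChain` live in the platform message types module, which may differ from the real plugin boilerplate:

```python
# Hypothetical plugin method; `self.host` is assumed to be the APIHost instance.
from pkg.platform.types import message as platform_message  # import path mirrors the diff above

async def broadcast_notice(self, text: str):
    # New API: list of currently enabled message platform adapters.
    adapters = self.host.get_platform_adapters()
    if not adapters:
        return
    # New API: push a message without an inbound event.
    await self.host.send_active_message(
        adapter=adapters[0],
        target_type="group",        # "person" or "group", per the docstring
        target_id="123456789",      # hypothetical target ID
        message=platform_message.MessageChain([platform_message.Plain(text)]),
    )
```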

View File

@@ -6,7 +6,7 @@ from . import entities, requester
from ...core import app
from . import token
from .requesters import chatcmpl, anthropicmsgs, moonshotchatcmpl, deepseekchatcmpl, ollamachat, giteeaichatcmpl
from .requesters import chatcmpl, anthropicmsgs, moonshotchatcmpl, deepseekchatcmpl, ollamachat, giteeaichatcmpl, xaichatcmpl, zhipuaichatcmpl
FETCH_MODEL_LIST_URL = "https://api.qchatgpt.rockchin.top/api/v2/fetch/model_list"

View File

@@ -0,0 +1,21 @@
from __future__ import annotations
import openai
from . import chatcmpl
from .. import requester
from ....core import app
@requester.requester_class("xai-chat-completions")
class XaiChatCompletions(chatcmpl.OpenAIChatCompletions):
"""xAI ChatCompletion API 请求器"""
client: openai.AsyncClient
requester_cfg: dict
def __init__(self, ap: app.Application):
self.ap = ap
self.requester_cfg = self.ap.provider_cfg.data['requester']['xai-chat-completions']

View File

@@ -0,0 +1,21 @@
from __future__ import annotations
import openai
from ....core import app
from . import chatcmpl
from .. import requester
@requester.requester_class("zhipuai-chat-completions")
class ZhipuAIChatCompletions(chatcmpl.OpenAIChatCompletions):
"""智谱AI ChatCompletion API 请求器"""
client: openai.AsyncClient
requester_cfg: dict
def __init__(self, ap: app.Application):
self.ap = ap
self.requester_cfg = self.ap.provider_cfg.data['requester']['zhipuai-chat-completions']
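Both requesters simply subclass the generic `OpenAIChatCompletions` requester and point `requester_cfg` at their own block in `provider.json`; the name passed to `@requester.requester_class(...)` must match the key used in the `requester` section. A hypothetical third OpenAI-compatible provider could be wired up the same way (the name `example-chat-completions` is illustrative only):

```python
from __future__ import annotations

import openai

from . import chatcmpl
from .. import requester
from ....core import app


@requester.requester_class("example-chat-completions")  # hypothetical requester key
class ExampleChatCompletions(chatcmpl.OpenAIChatCompletions):
    """Requester for a hypothetical OpenAI-compatible provider."""

    client: openai.AsyncClient

    requester_cfg: dict

    def __init__(self, ap: app.Application):
        self.ap = ap
        # base-url, args, and timeout come from the matching provider.json requester entry.
        self.requester_cfg = self.ap.provider_cfg.data['requester']['example-chat-completions']
```

Like `xaichatcmpl` and `zhipuaichatcmpl`, the new module would also need to be imported in the requester list shown in the `modelmgr` diff above.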

View File

@@ -5,6 +5,8 @@ import json
import uuid
import base64
import aiohttp
from .. import runner
from ...core import entities as core_entities
from .. import entities as llm_entities
@@ -97,7 +99,7 @@ class DifyServiceAPIRunner(runner.RequestRunner):
files=files,
timeout=self.ap.provider_cfg.data["dify-service-api"]["chat"]["timeout"],
):
self.ap.logger.debug("dify-chat-chunk: ", chunk)
self.ap.logger.debug("dify-chat-chunk: " + str(chunk))
if chunk['event'] == 'workflow_started':
mode = "workflow"
@@ -149,7 +151,8 @@ class DifyServiceAPIRunner(runner.RequestRunner):
files=files,
timeout=self.ap.provider_cfg.data["dify-service-api"]["chat"]["timeout"],
):
self.ap.logger.debug("dify-agent-chunk: ", chunk)
self.ap.logger.debug("dify-agent-chunk: " + str(chunk))
if chunk["event"] in ignored_events:
continue
if chunk["event"] == "agent_thought":
@@ -179,6 +182,21 @@ class DifyServiceAPIRunner(runner.RequestRunner):
],
)
yield msg
if chunk['event'] == 'message_file':
if chunk['type'] == 'image' and chunk['belongs_to'] == 'assistant':
base_url = self.dify_client.base_url
if base_url.endswith('/v1'):
base_url = base_url[:-3]
image_url = base_url + chunk['url']
yield llm_entities.Message(
role="assistant",
content=[llm_entities.ContentElement.from_image_url(image_url)],
)
query.session.using_conversation.uuid = chunk["conversation_id"]
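The new `message_file` branch converts Dify-hosted assistant images into a vision message. The URL handling only strips a trailing `/v1` from the Dify client's base URL before appending the relative file path returned in the chunk; a quick illustration with made-up values:

```python
# Illustrative values only; real ones come from the Dify client config and the chunk payload.
base_url = "https://dify.example.com/v1"
chunk_url = "/files/tools/abc123.png"

if base_url.endswith('/v1'):
    base_url = base_url[:-3]          # -> "https://dify.example.com"

image_url = base_url + chunk_url      # -> "https://dify.example.com/files/tools/abc123.png"
```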
@@ -215,7 +233,7 @@ class DifyServiceAPIRunner(runner.RequestRunner):
files=files,
timeout=self.ap.provider_cfg.data["dify-service-api"]["workflow"]["timeout"],
):
self.ap.logger.debug("dify-workflow-chunk: ", chunk)
self.ap.logger.debug("dify-workflow-chunk: " + str(chunk))
if chunk["event"] in ignored_events:
continue

View File

@@ -1,4 +1,4 @@
semantic_version = "v3.4.1.5"
semantic_version = "v3.4.2.1"
debug_mode = False

View File

@@ -115,6 +115,97 @@
"name": "deepseek-coder",
"requester": "deepseek-chat-completions",
"token_mgr": "deepseek"
},
{
"name": "grok-2-latest",
"requester": "xai-chat-completions",
"token_mgr": "xai"
},
{
"name": "grok-2",
"requester": "xai-chat-completions",
"token_mgr": "xai"
},
{
"name": "grok-2-vision-1212",
"requester": "xai-chat-completions",
"token_mgr": "xai",
"vision_supported": true
},
{
"name": "grok-2-1212",
"requester": "xai-chat-completions",
"token_mgr": "xai"
},
{
"name": "grok-vision-beta",
"requester": "xai-chat-completions",
"token_mgr": "xai",
"vision_supported": true
},
{
"name": "grok-beta",
"requester": "xai-chat-completions",
"token_mgr": "xai"
},
{
"name": "glm-4-plus",
"requester": "zhipuai-chat-completions",
"token_mgr": "zhipuai"
},
{
"name": "glm-4-0520",
"requester": "zhipuai-chat-completions",
"token_mgr": "zhipuai"
},
{
"name": "glm-4-air",
"requester": "zhipuai-chat-completions",
"token_mgr": "zhipuai"
},
{
"name": "glm-4-airx",
"requester": "zhipuai-chat-completions",
"token_mgr": "zhipuai"
},
{
"name": "glm-4-long",
"requester": "zhipuai-chat-completions",
"token_mgr": "zhipuai"
},
{
"name": "glm-4-flashx",
"requester": "zhipuai-chat-completions",
"token_mgr": "zhipuai"
},
{
"name": "glm-4-flash",
"requester": "zhipuai-chat-completions",
"token_mgr": "zhipuai"
},
{
"name": "glm-4v-plus",
"requester": "zhipuai-chat-completions",
"token_mgr": "zhipuai",
"vision_supported": true
},
{
"name": "glm-4v",
"requester": "zhipuai-chat-completions",
"token_mgr": "zhipuai",
"vision_supported": true
},
{
"name": "glm-4v-flash",
"requester": "zhipuai-chat-completions",
"token_mgr": "zhipuai",
"vision_supported": true
},
{
"name": "glm-zero-preview",
"requester": "zhipuai-chat-completions",
"token_mgr": "zhipuai",
"vision_supported": true
}
]
}

View File

@@ -16,6 +16,12 @@
],
"gitee-ai": [
"XXXXX"
],
"xai": [
"xai-1234567890"
],
"zhipuai": [
"xxxxxxx"
]
},
"requester": {
@@ -50,6 +56,16 @@
"base-url": "https://ai.gitee.com/v1",
"args": {},
"timeout": 120
},
"xai-chat-completions": {
"base-url": "https://api.x.ai/v1",
"args": {},
"timeout": 120
},
"zhipuai-chat-completions": {
"base-url": "https://open.bigmodel.cn/api/paas/v4",
"args": {},
"timeout": 120
}
},
"model": "gpt-4o",

View File

@@ -22,7 +22,6 @@
"openai": {
"type": "array",
"title": "OpenAI API 密钥",
"description": "OpenAI API 密钥",
"items": {
"type": "string"
},
@@ -31,7 +30,6 @@
"anthropic": {
"type": "array",
"title": "Anthropic API 密钥",
"description": "Anthropic API 密钥",
"items": {
"type": "string"
},
@@ -40,7 +38,6 @@
"moonshot": {
"type": "array",
"title": "Moonshot API 密钥",
"description": "Moonshot API 密钥",
"items": {
"type": "string"
},
@@ -49,7 +46,6 @@
"deepseek": {
"type": "array",
"title": "DeepSeek API 密钥",
"description": "DeepSeek API 密钥",
"items": {
"type": "string"
},
@@ -57,8 +53,23 @@
},
"gitee": {
"type": "array",
"title": "Gitee API 密钥",
"description": "Gitee API 密钥",
"title": "Gitee AI API 密钥",
"items": {
"type": "string"
},
"default": []
},
"xai": {
"type": "array",
"title": "xAI API 密钥",
"items": {
"type": "string"
},
"default": []
},
"zhipuai": {
"type": "array",
"title": "智谱AI API 密钥",
"items": {
"type": "string"
},
@@ -188,6 +199,42 @@
"default": 120
}
}
},
"xai-chat-completions": {
"type": "object",
"title": "xAI API 请求配置",
"description": "仅可编辑 URL 和 超时时间,额外请求参数不支持可视化编辑,请到编辑器编辑",
"properties": {
"base-url": {
"type": "string",
"title": "API URL"
},
"args": {
"type": "object"
},
"timeout": {
"type": "number",
"title": "API 请求超时时间",
"default": 120
}
}
},
"zhipuai-chat-completions": {
"type": "object",
"title": "智谱AI API 请求配置",
"description": "仅可编辑 URL 和 超时时间,额外请求参数不支持可视化编辑,请到编辑器编辑",
"properties": {
"base-url": {
"type": "string",
"title": "API URL"
},
"args": {
"type": "object"
},
"timeout": {
"type": "number"
}
}
}
}
},

View File

@@ -79,6 +79,12 @@
重载插件
</v-list-item-title>
</v-list-item>
<v-list-item @click="reload('provider')">
<v-list-item-title>
重载 LLM 管理器
</v-list-item-title>
</v-list-item>
</v-list>
</v-menu>
</v-list-item>
@@ -169,7 +175,8 @@ function openDocs() {
const reloadScopeLabel = {
'platform': "消息平台",
'plugin': "插件"
'plugin': "插件",
'provider': "LLM 管理器"
}
function reload(scope) {