Compare commits


31 Commits

Author SHA1 Message Date
Junyan Qin
f9f03b81d1 chore: release v4.0.3.3 2025-05-22 10:49:24 +08:00
Junyan Qin
42171a9c07 fix: combine quote message not in default pipeline config 2025-05-22 10:44:33 +08:00
Junyan Qin
f1f00115c9 chore: update issue template 2025-05-22 10:42:59 +08:00
Junyan Qin
59bff61409 chore: release v4.0.3.2 2025-05-21 19:46:42 +08:00
Junyan Qin
778693a804 perf: desc of random 2025-05-21 19:45:45 +08:00
Junyan Qin
e5b2da225c perf: no longer get host ip 2025-05-21 19:42:04 +08:00
Steven Lynn
4a988b89a2 fix: update auto-reply probability description in trigger.yaml (#1463) 2025-05-21 17:50:23 +08:00
Junyan Qin
e5e8807312 perf: no longer ask for apikeys for ollama and lm studio 2025-05-20 16:01:20 +08:00
Junyan Qin
1376530c2e fix: conversation is null 2025-05-20 15:32:04 +08:00
Junyan Qin
7d34a2154b perf: unify i18n text class in frontend 2025-05-20 11:32:55 +08:00
Junyan Qin
ff335130ae chore: update CONTRIBUTING 2025-05-20 09:39:46 +08:00
Junyan Qin
0afef0ac0f chore: update pr template 2025-05-20 09:21:59 +08:00
Junyan Qin (Chin)
6447f270ea Update bug-report_en.yml 2025-05-20 09:16:30 +08:00
Junyan Qin (Chin)
81be62e1a4 Update bug-report_en.yml 2025-05-20 09:15:52 +08:00
Junyan Qin (Chin)
409909ccb1 Update bug-report_en.yml (#1456) 2025-05-20 09:14:52 +08:00
Junyan Qin
b821b69dbb chore: perf issue templates 2025-05-20 09:13:13 +08:00
Junyan Qin
7e2448655e chore: add english issue templates 2025-05-20 09:11:47 +08:00
Junyan Qin (Chin)
a7d2a68639 feat: add supports for testing llm models (#1454)
* feat: add supports for testing llm models

* fix: linter error
2025-05-19 23:10:04 +08:00
fdc310
aba51409a7 feat:add qoute message process and add Whether to enable this function (#1446)
* Updated the wechatpad interface and adapter

* Updated the wechatpad interface and adapter

* Fixed some detail issues, such as @ replies, and thread synchronization between the startup login and starting the ws long connection

* Fixed the slash replacement issue when starting on wi in importutil; added a login in login, which is not useful for now; made some detail changes in wechatpad

* Updated the wechatpad interface and adapter

* Added handling to convert image links to image_base64 for sending

* feat(wechatpad): adjust logging + bugfix

* feat(wechatpad): fix typo

* Fixed wrong parameters in the send-voice API; added handling to convert sent links to base64 data (it seems only some links work)

* Fixed some careless typos

* chore: remove manager.py

* feat:add qoute message process and add Whether to enable this function

* chore: add db migration for this change

---------

Co-authored-by: shinelin <shinelinxx@gmail.com>
Co-authored-by: Junyan Qin (Chin) <rockchinq@gmail.com>
2025-05-19 22:24:18 +08:00
sheetung
5e5d37cbf1 St/webui (#1452)
* Fixed the card overflow issue on the webUI model configuration page

* fix: webUI card text overflow issue
2025-05-19 18:11:50 +08:00
sheetung
e5a99a0fe4 Fix card overflow on the webUI model configuration page (#1451) 2025-05-19 13:14:39 +08:00
Junyan Qin
a594cc07f6 chore: release v4.0.3.1 2025-05-19 10:31:11 +08:00
Junyan Qin
0a9714fbe7 perf: no cache for fronend page 2025-05-17 19:30:26 +08:00
Junyan Qin (Chin)
1992934dce fix: user_funcs typo in ollama chat requester (#1431) 2025-05-15 20:51:58 +08:00
zejiewang
bb930aec14 fix:lark adapter listeners init problem (#1426)
Co-authored-by: wangzejie <wangzejie@meicai.cn>
2025-05-15 11:25:38 +08:00
Junyan Qin
1d7f2ab701 fix: wrong ref in HomeTitleBar 2025-05-15 10:54:22 +08:00
Junyan Qin
347da6142e perf: multi language 2025-05-15 10:40:36 +08:00
Junyan Qin
a9f4dc517a perf: remove -q params in plugin deps precheking 2025-05-15 10:24:53 +08:00
Junyan Qin (Chin)
9d45f3f3a7 updatr README.md 2025-05-15 09:04:38 +08:00
Guanchao Wang
256d24718b fix: dingtalk & wecom problems (#1424) 2025-05-14 22:55:16 +08:00
Junyan Qin
1272b8ef16 ci: update Dockerfile python version 2025-05-14 22:22:17 +08:00
46 changed files with 542 additions and 156 deletions

View File

@@ -1,5 +1,5 @@
 name: 漏洞反馈
-description: 报错或漏洞请使用这个模板创建不使用此模板创建的异常、漏洞相关issue将被直接关闭。由于自己操作不当/不甚了解所用技术栈引起的网络连接问题恕无法解决,请勿提 issue。容器间网络连接问题参考文档 https://docs.langbot.app/deploy/network-details.html
+description: 【供中文用户】报错或漏洞请使用这个模板创建不使用此模板创建的异常、漏洞相关issue将被直接关闭。由于自己操作不当/不甚了解所用技术栈引起的网络连接问题恕无法解决,请勿提 issue。容器间网络连接问题参考文档 https://docs.langbot.app/zh/workshop/network-details.html
 title: "[Bug]: "
 labels: ["bug?"]
 body:
@@ -7,7 +7,7 @@ body:
 attributes:
 label: 运行环境
 description: LangBot 版本、操作系统、系统架构、**Python版本**、**主机地理位置**
-placeholder: 例如v3.3.0、CentOS x64 Python 3.10.3、Docker 的系统直接写 Docker 就行
+placeholder: 例如v3.3.0、CentOS x64 Python 3.10.3、Docker
 validations:
 required: true
 - type: textarea
@@ -19,12 +19,12 @@ body:
 - type: textarea
 attributes:
 label: 复现步骤
-description: 如何重现这个问题,越详细越好;请贴上所有相关的配置文件和元数据文件(注意隐去敏感信息)
+description: 如何重现这个问题,越详细越好;提供越多信息,我们会越快解决问题。
 validations:
-required: true
+required: false
 - type: textarea
 attributes:
 label: 启用的插件
-description: 有些情况可能和插件功能有关,建议提供插件启用情况。可以使用`!plugin`命令查看已启用的插件
+description: 有些情况可能和插件功能有关,建议提供插件启用情况。
 validations:
 required: false

View File

@@ -0,0 +1,30 @@
name: Bug report
description: Report bugs or vulnerabilities using this template. For container network connection issues, refer to the documentation https://docs.langbot.app/en/workshop/network-details.html
title: "[Bug]: "
labels: ["bug?"]
body:
- type: input
attributes:
label: Runtime environment
description: LangBot version, operating system, system architecture, **Python version**, **host location**
placeholder: "For example: v3.3.0, CentOS x64 Python 3.10.3, Docker"
validations:
required: true
- type: textarea
attributes:
label: Exception
description: Describe the exception in detail, what happened and when it happened. **Please include log information.**
validations:
required: true
- type: textarea
attributes:
label: Reproduction steps
description: How to reproduce this problem, the more detailed the better; the more information you provide, the faster we will solve the problem.
validations:
required: false
- type: textarea
attributes:
label: Enabled plugins
description: Some cases may be related to plugin functionality, so please provide the plugin enablement status.
validations:
required: false

View File

@@ -1,7 +1,7 @@
 name: 需求建议
 title: "[Feature]: "
-labels: ["改进"]
+labels: []
-description: "新功能或现有功能优化请使用这个模板不符合类别的issue将被直接关闭"
+description: "【供中文用户】新功能或现有功能优化请使用这个模板不符合类别的issue将被直接关闭"
 body:
 - type: dropdown
 attributes:

View File

@@ -0,0 +1,21 @@
name: Feature request
title: "[Feature]: "
labels: []
description: "New features or existing feature improvements should use this template; issues that do not match will be closed directly"
body:
- type: dropdown
attributes:
label: This is a?
description: New feature request or existing feature improvement
options:
- New feature
- Existing feature improvement
validations:
required: true
- type: textarea
attributes:
label: Detailed description
description: Detailed description, the more detailed the better
validations:
required: true

View File

@@ -1,7 +1,7 @@
 name: 提交新插件
 title: "[Plugin]: 请求登记新插件"
 labels: ["独立插件"]
-description: "本模板供且仅供提交新插件使用"
+description: "【供中文用户】本模板供且仅供提交新插件使用"
 body:
 - type: input
 attributes:

View File

@@ -0,0 +1,24 @@
name: Submit a new plugin
title: "[Plugin]: Request to register a new plugin"
labels: ["Independent Plugin"]
description: "This template is only for submitting new plugins"
body:
- type: input
attributes:
label: Plugin name
description: Fill in the name of the plugin
validations:
required: true
- type: textarea
attributes:
label: Plugin code repository address
description: Only support Github
validations:
required: true
- type: textarea
attributes:
label: Plugin description
description: The description of the plugin
validations:
required: true

View File

@@ -1,20 +1,21 @@
-## 概述
+## 概述 / Overview
-实现/解决/优化的内容:
+> 请在此部分填写你实现/解决/优化的内容:
+> Summary of what you implemented/solved/optimized:
-## 检查清单
+## 检查清单 / Checklist
-### PR 作者完成
+### PR 作者完成 / For PR author
-*请在方括号间写`x`以打勾
+*请在方括号间写`x`以打勾 / Please tick the box with `x`*
-- [ ] 阅读仓库[贡献指引](https://github.com/RockChinQ/LangBot/blob/master/CONTRIBUTING.md)了吗?
+- [ ] 阅读仓库[贡献指引](https://github.com/RockChinQ/LangBot/blob/master/CONTRIBUTING.md)了吗? / Have you read the [contribution guide](https://github.com/RockChinQ/LangBot/blob/master/CONTRIBUTING.md)?
-- [ ] 与项目所有者沟通过了吗?
+- [ ] 与项目所有者沟通过了吗? / Have you communicated with the project maintainer?
-- [ ] 我确定已自行测试所作的更改,确保功能符合预期。
+- [ ] 我确定已自行测试所作的更改,确保功能符合预期。 / I have tested the changes and ensured they work as expected.
-### 项目所有者完成
+### 项目维护者完成 / For project maintainer
-- [ ] 相关 issues 链接了吗?
+- [ ] 相关 issues 链接了吗? / Have you linked the related issues?
-- [ ] 配置项写好了吗?迁移写好了吗?生效了吗?
+- [ ] 配置项写好了吗?迁移写好了吗?生效了吗? / Have you written the configuration items? Have you written the migration? Has it taken effect?
-- [ ] 依赖加到 pyproject.toml 和 core/bootutils/deps.py 了吗
+- [ ] 依赖加到 pyproject.toml 和 core/bootutils/deps.py 了吗 / Have you added the dependencies to pyproject.toml and core/bootutils/deps.py?
-- [ ] 文档编写了吗?
+- [ ] 文档编写了吗? / Have you written the documentation?

View File

@@ -5,22 +5,27 @@
 ### 贡献形式
 - 提交PR解决issues中提到的bug或期待的功能
-- 提交PR实现您设想的功能请先提出issue与者沟通)
-- 优化代码架构,使各个模块的组织更加整洁优雅
-- 在issues中提出发现的bug或者期待的功能
+- 提交PR实现您设想的功能请先提出issue与项目维护者沟通)
 - 为本项目在其他社交平台撰写文章、制作视频等
 - 为本项目的衍生项目作出贡献,或开发插件增加功能
-### 如何开始
-- 加入本项目交流群,一同探讨项目相关事务
-- 解决本项目或衍生项目的issues中亟待解决的问题
-- 阅读并完善本项目文档
-- 在各个社交媒体撰写本项目教程等
-### 代码规范
-- 代码中的注解`务必`符合Google风格的规范
-- 模块顶部的引入代码请遵循`系统模块``第三方库模块``自定义模块`的顺序进行引入
-- `不要`直接引入模块的特定属性,而是引入这个模块,再通过`xxx.yyy`的形式使用属性
-- 任何作用域的字段`必须`先声明后使用,并在声明处注明类型提示
+### 沟通语言规范
+- 在 PR 和 Commit Message 中请使用全英文
+- 对于中文用户issue 中可以使用中文
+<hr/>
+## Guidelines
+### Contribution
+- Submit PRs to solve bugs or features in the issues
+- Submit PRs to implement your ideas (Please create an issue first and communicate with the project maintainer)
+- Write articles or make videos about this project on other social platforms
+- Contribute to the development of derivative projects, or develop plugins to add features
+### Spoken Language
+- Use English in PRs and Commit Messages
+- For English users, you can use English in issues

View File

@@ -6,7 +6,7 @@ COPY web ./web
 RUN cd web && npm install && npm run build
-FROM python:3.10.13-slim
+FROM python:3.12.7-slim
 WORKDIR /app

View File

@@ -32,6 +32,8 @@
 </p>
+> 近期 GeWeChat 项目归档,我们已经适配 WeChatPad 协议端,个微恢复正常使用,详情请查看文档。
 ## ✨ 特性
 - 💬 大模型对话、Agent支持多种大模型适配群聊和私聊具有多轮对话、工具调用、多模态能力并深度适配 [Dify](https://dify.ai)。目前支持 QQ、QQ频道、企业微信、个人微信、飞书、Discord、Telegram 等平台。

View File

@@ -115,13 +115,20 @@ class DingTalkClient:
 if event:
 await self._handle_message(event)
-async def send_message(self, content: str, incoming_message):
+async def send_message(self, content: str, incoming_message,at:bool):
 if self.markdown_card:
-self.EchoTextHandler.reply_markdown(
-title=self.robot_name + '的回答',
-text=content,
-incoming_message=incoming_message,
-)
+if at:
+self.EchoTextHandler.reply_markdown(
+title='@'+incoming_message.sender_nick+' '+content,
+text='@'+incoming_message.sender_nick+' '+content,
+incoming_message=incoming_message,
+)
+else:
+self.EchoTextHandler.reply_markdown(
+title=content,
+text=content,
+incoming_message=incoming_message,
+)
 else:
 self.EchoTextHandler.reply_text(content, incoming_message)

View File

@@ -29,7 +29,6 @@ class WecomClient:
 self.access_token = ''
 self.secret_for_contacts = contacts_secret
 self.app = Quart(__name__)
-self.wxcpt = WXBizMsgCrypt(self.token, self.aes, self.corpid)
 self.app.add_url_rule(
 '/callback/command',
 'handle_callback',
@@ -171,16 +170,17 @@ class WecomClient:
 timestamp = request.args.get('timestamp')
 nonce = request.args.get('nonce')
+wxcpt = WXBizMsgCrypt(self.token, self.aes, self.corpid)
 if request.method == 'GET':
 echostr = request.args.get('echostr')
-ret, reply_echo_str = self.wxcpt.VerifyURL(msg_signature, timestamp, nonce, echostr)
+ret, reply_echo_str = wxcpt.VerifyURL(msg_signature, timestamp, nonce, echostr)
 if ret != 0:
 raise Exception(f'验证失败,错误码: {ret}')
 return reply_echo_str
 elif request.method == 'POST':
 encrypt_msg = await request.data
-ret, xml_msg = self.wxcpt.DecryptMsg(encrypt_msg, msg_signature, timestamp, nonce)
+ret, xml_msg = wxcpt.DecryptMsg(encrypt_msg, msg_signature, timestamp, nonce)
 if ret != 0:
 raise Exception(f'消息解密失败,错误码: {ret}')

main.py
View File

@@ -10,8 +10,8 @@ asciiart = r"""
 |____\__,_|_||_\__, |___/\___/\__|
 |___/
-⭐️开源地址: https://github.com/RockChinQ/LangBot
-📖文档地址: https://docs.langbot.app
+⭐️ Open Source 开源地址: https://github.com/RockChinQ/LangBot
+📖 Documentation 文档地址: https://docs.langbot.app
 """
@@ -28,10 +28,14 @@ async def main_entry(loop: asyncio.AbstractEventLoop):
 if missing_deps:
 print('以下依赖包未安装,将自动安装,请完成后重启程序:')
+print(
+'These dependencies are missing, they will be installed automatically, please restart the program after completion:'
+)
 for dep in missing_deps:
 print('-', dep)
 await deps.install_deps(missing_deps)
 print('已自动安装缺失的依赖包,请重启程序。')
+print('The missing dependencies have been installed automatically, please restart the program.')
 sys.exit(0)
 # check plugin deps
@@ -53,6 +57,7 @@ async def main_entry(loop: asyncio.AbstractEventLoop):
 if generated_files:
 print('以下文件不存在,已自动生成:')
+print('Following files do not exist and have been automatically generated:')
 for file in generated_files:
 print('-', file)
@@ -69,9 +74,10 @@ if __name__ == '__main__':
 if sys.version_info < (3, 10, 1):
 print('需要 Python 3.10.1 及以上版本,当前 Python 版本为:', sys.version)
 input('按任意键退出...')
+print('Your Python version is not supported. Please exit the program by pressing any key.')
 exit(1)
-# 检查本目录是否有main.py且包含LangBot字符串
+# Check if the current directory is the LangBot project root directory
 invalid_pwd = False
 if not os.path.exists('main.py'):
@@ -84,6 +90,8 @@ if __name__ == '__main__':
 if invalid_pwd:
 print('请在 LangBot 项目根目录下以命令形式运行此程序。')
 input('按任意键退出...')
+print('Please run this program in the LangBot project root directory in command form.')
+print('Press any key to exit...')
 exit(1)
 loop = asyncio.new_event_loop()

View File

@@ -36,3 +36,11 @@ class LLMModelsRouterGroup(group.RouterGroup):
 await self.ap.model_service.delete_llm_model(model_uuid)
 return self.success()
+@self.route('/<model_uuid>/test', methods=['POST'])
+async def _(model_uuid: str) -> str:
+json_data = await quart.request.json
+await self.ap.model_service.test_llm_model(model_uuid, json_data)
+return self.success()
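
Note: the route added above exposes POST /api/v1/provider/models/llm/<model_uuid>/test, backed by ModelsService.test_llm_model — an existing model is looked up by UUID, while the placeholder `_` builds a throwaway runtime model from the request body and sends it a single "Hello, world!" probe. A minimal sketch of calling it from Python (host, port, and any auth headers are assumptions; the payload fields mirror the frontend's testLLMModel call):

```python
import requests

# Assumed local API address; adjust host/port and add auth headers if your deployment requires them.
BASE_URL = 'http://127.0.0.1:5300/api/v1'

payload = {
    'uuid': '',
    'name': 'my-model',                      # hypothetical model name
    'description': '',
    'requester': 'openai-chat-completions',  # hypothetical requester key
    'requester_config': {'base_url': 'https://api.openai.com/v1', 'timeout': 120},
    'api_keys': ['sk-xxxxx'],
    'abilities': [],
    'extra_args': [],
}

# Using '_' as the UUID tests the ad-hoc config in the body instead of a stored model.
resp = requests.post(f'{BASE_URL}/provider/models/llm/_/test', json=payload)
print(resp.status_code, resp.json())
```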

View File

@@ -107,4 +107,8 @@ class HTTPController:
 elif path.endswith('.txt'):
 mimetype = 'text/plain'
-return await quart.send_from_directory(frontend_path, path, mimetype=mimetype)
+response = await quart.send_from_directory(frontend_path, path, mimetype=mimetype)
+response.headers['Cache-Control'] = 'no-cache, no-store, must-revalidate'
+response.headers['Pragma'] = 'no-cache'
+response.headers['Expires'] = '0'
+return response

View File

@@ -6,6 +6,8 @@ import sqlalchemy
 from ....core import app
 from ....entity.persistence import model as persistence_model
 from ....entity.persistence import pipeline as persistence_pipeline
+from ....provider.modelmgr import requester as model_requester
+from ....provider import entities as llm_entities
 class ModelsService:
@@ -78,3 +80,26 @@ class ModelsService:
 )
 await self.ap.model_mgr.remove_llm_model(model_uuid)
+async def test_llm_model(self, model_uuid: str, model_data: dict) -> None:
+runtime_llm_model: model_requester.RuntimeLLMModel | None = None
+if model_uuid != '_':
+for model in self.ap.model_mgr.llm_models:
+if model.model_entity.uuid == model_uuid:
+runtime_llm_model = model
+break
+if runtime_llm_model is None:
+raise Exception('model not found')
+else:
+runtime_llm_model = await self.ap.model_mgr.init_runtime_llm_model(model_data)
+await runtime_llm_model.requester.invoke_llm(
+query=None,
+model=runtime_llm_model,
+messages=[llm_entities.Message(role='user', content='Hello, world!')],
+funcs=[],
+extra_args={},
+)

View File

@@ -23,7 +23,7 @@ from ..api.http.service import model as model_service
 from ..api.http.service import pipeline as pipeline_service
 from ..api.http.service import bot as bot_service
 from ..discover import engine as discover_engine
-from ..utils import logcache, ip
+from ..utils import logcache
 from . import taskmgr
 from . import entities as core_entities
@@ -158,28 +158,24 @@
 """打印访问 webui 的提示"""
 if not os.path.exists(os.path.join('.', 'web/out')):
-self.logger.warning('WebUI 文件缺失,请根据文档获取https://docs.langbot.app/webui/intro.html')
+self.logger.warning('WebUI 文件缺失,请根据文档部署https://docs.langbot.app/zh')
+self.logger.warning(
+'WebUI files are missing, please deploy according to the documentation: https://docs.langbot.app/en'
+)
 return
 host_ip = '127.0.0.1'
-public_ip = await ip.get_myip()
 port = self.instance_config.data['api']['port']
 tips = f"""
 =======================================
-您可通过以下方式访问管理面板
-🏠 本地地址:http://{host_ip}:{port}/
-🌐 公网地址http://{public_ip}:{port}/
-📌 如果您在容器中运行此程序,请确保容器的 {port} 端口已对外暴露
-🔗 若要使用公网地址访问,请阅读以下须知
-1. 公网地址仅供参考,请以您的主机公网 IP 为准;
-2. 要使用公网地址访问,请确保您的主机具有公网 IP并且系统防火墙已放行 {port} 端口;
-🤯 WebUI 仍处于 Beta 测试阶段,如有问题或建议请反馈到 https://github.com/RockChinQ/LangBot/issues
+Access WebUI / 访问管理面板
+🏠 Local Address: http://{host_ip}:{port}/
+🌐 Public Address: http://<Your Public IP>:{port}/
+📌 Running this program in a container? Please ensure that the {port} port is exposed
 =======================================
 """.strip()
 for line in tips.split('\n'):

View File

@@ -74,5 +74,5 @@ async def precheck_plugin_deps():
 if 'requirements.txt' in os.listdir(subdir):
 pkgmgr.install_requirements(
 os.path.join(subdir, 'requirements.txt'),
-extra_params=['-q', '-q', '-q'],
+extra_params=[],
 )

View File

@@ -0,0 +1,36 @@
from .. import migration
import sqlalchemy
from ...entity.persistence import pipeline as persistence_pipeline
@migration.migration_class(2)
class DBMigrateCombineQuoteMsgConfig(migration.DBMigration):
"""引用消息合并配置"""
async def upgrade(self):
"""升级"""
# read all pipelines
pipelines = await self.ap.persistence_mgr.execute_async(sqlalchemy.select(persistence_pipeline.LegacyPipeline))
for pipeline in pipelines:
serialized_pipeline = self.ap.persistence_mgr.serialize_model(persistence_pipeline.LegacyPipeline, pipeline)
config = serialized_pipeline['config']
if 'misc' not in config['trigger']:
config['trigger']['misc'] = {}
if 'combine-quote-message' not in config['trigger']['misc']:
config['trigger']['misc']['combine-quote-message'] = False
await self.ap.persistence_mgr.execute_async(
sqlalchemy.update(persistence_pipeline.LegacyPipeline)
.where(persistence_pipeline.LegacyPipeline.uuid == serialized_pipeline['uuid'])
.values({'config': config, 'for_version': self.ap.ver_mgr.get_current_version()})
)
async def downgrade(self):
"""降级"""
pass

View File

@@ -34,6 +34,7 @@ class PreProcessor(stage.PipelineStage):
 session = await self.ap.sess_mgr.get_session(query)
 # 非 local-agent 时llm_model 为 None
 llm_model = (
 await self.ap.model_mgr.get_model_by_uuid(query.pipeline_config['ai']['local-agent']['model'])
@@ -81,6 +82,7 @@ class PreProcessor(stage.PipelineStage):
 content_list = []
 plain_text = ''
+qoute_msg = query.pipeline_config["trigger"].get("misc",'').get("combine-quote-message")
 for me in query.message_chain:
 if isinstance(me, platform_message.Plain):
@@ -92,6 +94,18 @@ class PreProcessor(stage.PipelineStage):
 ):
 if me.base64 is not None:
 content_list.append(llm_entities.ContentElement.from_image_base64(me.base64))
+elif isinstance(me, platform_message.Quote) and qoute_msg:
+for msg in me.origin:
+if isinstance(msg, platform_message.Plain):
+content_list.append(llm_entities.ContentElement.from_text(msg.text))
+elif isinstance(msg, platform_message.Image):
+if selected_runner != 'local-agent' or query.use_llm_model.model_entity.abilities.__contains__(
+'vision'
+):
+if msg.base64 is not None:
+content_list.append(llm_entities.ContentElement.from_image_base64(msg.base64))
 query.variables['user_message_text'] = plain_text

View File

@@ -15,6 +15,7 @@ from ...utils import image
 class AiocqhttpMessageConverter(adapter.MessageConverter):
 @staticmethod
 async def yiri2target(
 message_chain: platform_message.MessageChain,
@@ -66,14 +67,40 @@ class AiocqhttpMessageConverter(adapter.MessageConverter):
 return msg_list, msg_id, msg_time
 @staticmethod
-async def target2yiri(message: str, message_id: int = -1):
+async def target2yiri(message: str, message_id: int = -1,bot=None):
 message = aiocqhttp.Message(message)
+async def process_message_data(msg_data, reply_list):
+if msg_data["type"] == "image":
+image_base64, image_format = await image.qq_image_url_to_base64(msg_data["data"]['url'])
+reply_list.append(
+platform_message.Image(base64=f'data:image/{image_format};base64,{image_base64}'))
+elif msg_data["type"] == "text":
+reply_list.append(platform_message.Plain(text=msg_data["data"]["text"]))
+elif msg_data["type"] == "forward":  # 这里来应该传入转发消息组暂时传入qoute
+for forward_msg_datas in msg_data["data"]["content"]:
+for forward_msg_data in forward_msg_datas["message"]:
+await process_message_data(forward_msg_data, reply_list)
+elif msg_data["type"] == "at":
+if msg_data["data"]['qq'] == 'all':
+reply_list.append(platform_message.AtAll())
+else:
+reply_list.append(
+platform_message.At(
+target=msg_data["data"]['qq'],
+)
+)
 yiri_msg_list = []
 yiri_msg_list.append(platform_message.Source(id=message_id, time=datetime.datetime.now()))
 for msg in message:
+reply_list = []
 if msg.type == 'at':
 if msg.data['qq'] == 'all':
 yiri_msg_list.append(platform_message.AtAll())
@@ -88,20 +115,46 @@ class AiocqhttpMessageConverter(adapter.MessageConverter):
 elif msg.type == 'image':
 image_base64, image_format = await image.qq_image_url_to_base64(msg.data['url'])
 yiri_msg_list.append(platform_message.Image(base64=f'data:image/{image_format};base64,{image_base64}'))
+elif msg.type == 'forward':
+# 暂时不太合理
+# msg_datas = await bot.get_msg(message_id=message_id)
+# print(msg_datas)
+# for msg_data in msg_datas["message"]:
+# await process_message_data(msg_data, yiri_msg_list)
+pass
+elif msg.type == 'reply':  # 此处处理引用消息传入Qoute
+msg_datas = await bot.get_msg(message_id=msg.data["id"])
+for msg_data in msg_datas["message"]:
+await process_message_data(msg_data, reply_list)
+reply_msg = platform_message.Quote(message_id=msg.data["id"],sender_id=msg_datas["user_id"],origin=reply_list)
+yiri_msg_list.append(reply_msg)
 chain = platform_message.MessageChain(yiri_msg_list)
 return chain
 class AiocqhttpEventConverter(adapter.EventConverter):
 @staticmethod
 async def yiri2target(event: platform_events.MessageEvent, bot_account_id: int):
 return event.source_platform_object
 @staticmethod
-async def target2yiri(event: aiocqhttp.Event):
-yiri_chain = await AiocqhttpMessageConverter.target2yiri(event.message, event.message_id)
+async def target2yiri(event: aiocqhttp.Event,bot=None):
+yiri_chain = await AiocqhttpMessageConverter.target2yiri(event.message, event.message_id,bot)
 if event.message_type == 'group':
 permission = 'MEMBER'
@@ -205,7 +258,7 @@ class AiocqhttpAdapter(adapter.MessagePlatformAdapter):
 async def on_message(event: aiocqhttp.Event):
 self.bot_account_id = event.self_id
 try:
-return await callback(await self.event_converter.target2yiri(event), self)
+return await callback(await self.event_converter.target2yiri(event,self.bot), self)
 except Exception:
 traceback.print_exc()

View File

@@ -14,9 +14,14 @@ import datetime
 class DingTalkMessageConverter(adapter.MessageConverter):
 @staticmethod
 async def yiri2target(message_chain: platform_message.MessageChain):
+content = ''
+at = False
 for msg in message_chain:
+if type(msg) is platform_message.At:
+at = True
 if type(msg) is platform_message.Plain:
-return msg.text
+content += msg.text
+return content,at
 @staticmethod
 async def target2yiri(event: DingTalkEvent, bot_name: str):
@@ -128,8 +133,8 @@
 )
 incoming_message = event.incoming_message
-content = await DingTalkMessageConverter.yiri2target(message)
-await self.bot.send_message(content, incoming_message)
+content,at = await DingTalkMessageConverter.yiri2target(message)
+await self.bot.send_message(content, incoming_message,at)
 async def send_message(self, target_type: str, target_id: str, message: platform_message.MessageChain):
 content = await DingTalkMessageConverter.yiri2target(message)

View File

@@ -332,7 +332,7 @@ class LarkAdapter(adapter.MessagePlatformAdapter):
 listeners: typing.Dict[
 typing.Type[platform_events.Event],
 typing.Callable[[platform_events.Event, adapter.MessagePlatformAdapter], None],
-] = {}
+]
 config: dict
 quart_app: quart.Quart
@@ -342,6 +342,7 @@
 self.config = config
 self.ap = ap
 self.quart_app = quart.Quart(__name__)
+self.listeners = {}
 @self.quart_app.route('/lark/callback', methods=['POST'])
 async def lark_callback():

View File

@@ -4,9 +4,6 @@ import sqlalchemy
 from . import entities, requester
 from ...core import app
-from ...core import entities as core_entities
-from .. import entities as llm_entities
-from ..tools import entities as tools_entities
 from ...discover import engine
 from . import token
 from ...entity.persistence import model as persistence_model
@@ -69,12 +66,11 @@
 for llm_model in llm_models:
 await self.load_llm_model(llm_model)
-async def load_llm_model(
+async def init_runtime_llm_model(
 self,
 model_info: persistence_model.LLMModel | sqlalchemy.Row[persistence_model.LLMModel] | dict,
 ):
-"""加载模型"""
+"""初始化运行时模型"""
 if isinstance(model_info, sqlalchemy.Row):
 model_info = persistence_model.LLMModel(**model_info._mapping)
 elif isinstance(model_info, dict):
@@ -92,6 +88,15 @@
 ),
 requester=requester_inst,
 )
+return runtime_llm_model
+async def load_llm_model(
+self,
+model_info: persistence_model.LLMModel | sqlalchemy.Row[persistence_model.LLMModel] | dict,
+):
+"""加载模型"""
+runtime_llm_model = await self.init_runtime_llm_model(model_info)
 self.llm_models.append(runtime_llm_model)
 async def get_model_by_name(self, name: str) -> entities.LLMModelInfo: # deprecated
@@ -132,12 +137,3 @@
 if component.metadata.name == name:
 return component
 return None
-async def invoke_llm(
-self,
-query: core_entities.Query,
-model_uuid: str,
-messages: list[llm_entities.Message],
-funcs: list[tools_entities.LLMFunction] = None,
-) -> llm_entities.Message:
-pass

View File

@@ -42,7 +42,7 @@ class OllamaChatCompletions(requester.LLMAPIRequester):
 query: core_entities.Query,
 req_messages: list[dict],
 use_model: requester.RuntimeLLMModel,
-user_funcs: list[tools_entities.LLMFunction] = None,
+use_funcs: list[tools_entities.LLMFunction] = None,
 extra_args: dict[str, typing.Any] = {},
 ) -> llm_entities.Message:
 args = extra_args.copy()
@@ -67,8 +67,8 @@
 args['messages'] = messages
 args['tools'] = []
-if user_funcs:
-tools = await self.ap.tool_mgr.generate_tools_for_openai(user_funcs)
+if use_funcs:
+tools = await self.ap.tool_mgr.generate_tools_for_openai(use_funcs)
 if tools:
 args['tools'] = tools

View File

@@ -93,6 +93,7 @@ class DifyServiceAPIRunner(runner.RequestRunner):
 async def _chat_messages(self, query: core_entities.Query) -> typing.AsyncGenerator[llm_entities.Message, None]:
 """调用聊天助手"""
 cov_id = query.session.using_conversation.uuid or ''
+query.variables['conversation_id'] = cov_id
 plain_text, image_ids = await self._preprocess_user_message(query)
@@ -155,6 +156,7 @@
 ) -> typing.AsyncGenerator[llm_entities.Message, None]:
 """调用聊天助手"""
 cov_id = query.session.using_conversation.uuid or ''
+query.variables['conversation_id'] = cov_id
 plain_text, image_ids = await self._preprocess_user_message(query)

View File

@@ -1,6 +1,6 @@
-semantic_version = 'v4.0.3'
-required_database_version = 1
+semantic_version = 'v4.0.3.3'
+required_database_version = 2
 """标记本版本所需要的数据库结构版本,用于判断数据库迁移"""
 debug_mode = False

View File

@@ -1,10 +0,0 @@
import aiohttp
async def get_myip() -> str:
try:
async with aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=10)) as session:
async with session.get('https://ip.useragentinfo.com/myip') as response:
return await response.text()
except Exception:
return '0.0.0.0'

View File

@@ -199,9 +199,9 @@ class VersionManager:
 try:
 if await self.ap.ver_mgr.is_new_version_available():
 return (
-'有新版本可用,根据文档更新https://docs.langbot.app/deploy/update.html',
+'New version available:\n有新版本可用,根据文档更新: \nhttps://docs.langbot.app/zh/deploy/update.html',
 logging.INFO,
 )
 except Exception as e:
-return f'检查版本更新时出错: {e}', logging.WARNING
+return f'Error checking version update: {e}', logging.WARNING

View File

@@ -16,6 +16,9 @@
"ignore-rules": { "ignore-rules": {
"prefix": [], "prefix": [],
"regexp": [] "regexp": []
},
"misc": {
"combine-quote-message": true
} }
}, },
"safety": { "safety": {

View File

@@ -46,8 +46,8 @@ stages:
 en_US: Random
 zh_Hans: 随机
 description:
-en_US: The probability of the random response, range from 0.0 to 1.0
-zh_Hans: 随机响应概率范围0.0-1.0,对应 0% 到 100%
+en_US: 'Probability of automatically responding to messages that are not matched by other rules. Range: 0.0-1.0 (0%-100%).'
+zh_Hans: '自动响应其他规则未匹配的消息的概率范围0.0-1.0 (0%-100%)。'
 type: float
 required: false
 default: 0
@@ -117,3 +117,18 @@
 type: array[string]
 required: true
 default: []
+- name: misc
+label:
+en_US: Misc
+zh_Hans: 杂项
+config:
+- name: combine-quote-message
+label:
+en_US: Combine Quote Message
+zh_Hans: 合并引用消息
+description:
+en_US: If enabled, the bot will combine the quote message with the user's message
+zh_Hans: 如果启用,将合并引用消息与用户发送的消息
+type: boolean
+required: true
+default: true
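
The new combine-quote-message switch lands under trigger.misc in each pipeline config: fresh installs pick it up from the default JSON above, existing pipelines are backfilled by DB migration 2, and the preprocessor stage reads it before folding quoted messages into the prompt. A defensive way to read it from a plain config dict (a sketch, not the project's exact accessor):

```python
# Minimal sketch: read the option while tolerating older configs that predate the 'misc' section.
pipeline_config = {'trigger': {'misc': {'combine-quote-message': True}}}

combine_quote = (
    pipeline_config.get('trigger', {})
    .get('misc', {})
    .get('combine-quote-message', False)
)
print(combine_quote)  # True
```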

View File

@@ -33,6 +33,8 @@
 display: flex;
 flex-direction: column;
 gap: 0.2rem;
+min-width: 0;
+width: 100%;
 }
 .basicInfoNameContainer {
@@ -43,12 +45,18 @@
 .basicInfoName {
 font-size: 1.4rem;
 font-weight: 500;
+white-space: nowrap;
+overflow: hidden;
+text-overflow: ellipsis;
 }
 .basicInfoDescription {
 font-size: 1rem;
 font-weight: 300;
 color: #b1b1b1;
+white-space: nowrap;
+overflow: hidden;
+text-overflow: ellipsis;
 }
 .basicInfoAdapterContainer {
@@ -88,3 +96,12 @@
 font-weight: 500;
 color: #626262;
 }
+.bigText {
+white-space: nowrap;
+overflow: hidden;
+text-overflow: ellipsis;
+font-size: 1.4rem;
+font-weight: bold;
+max-width: 100%;
+}

View File

@@ -138,7 +138,18 @@ export default function HomeSidebar({
 <SidebarChild
 onClick={() => {
 // open docs.langbot.app
-window.open('https://docs.langbot.app', '_blank');
+const language = localStorage.getItem('langbot_language');
+if (language === 'zh-Hans') {
+window.open(
+'https://docs.langbot.app/zh/insight/guide.html',
+'_blank',
+);
+} else {
+window.open(
+'https://docs.langbot.app/en/insight/guide.html',
+'_blank',
+);
+}
 }}
 isSelected={false}
 icon={

View File

@@ -1,4 +1,5 @@
 import styles from './HomeSidebar.module.css';
+import { I18nLabel } from '@/app/infra/entities/common';
 export interface ISidebarChildVO {
 id: string;
@@ -6,7 +7,7 @@ export interface ISidebarChildVO {
 name: string;
 route: string;
 description: string;
-helpLink: string;
+helpLink: I18nLabel;
 }
 export class SidebarChildVO {
@@ -15,7 +16,7 @@ export class SidebarChildVO {
 name: string;
 route: string;
 description: string;
-helpLink: string;
+helpLink: I18nLabel;
 constructor(props: ISidebarChildVO) {
 this.id = props.id;

View File

@@ -22,7 +22,10 @@ export const sidebarConfigList = [
 ),
 route: '/home/bots',
 description: t('bots.description'),
-helpLink: 'https://docs.langbot.app/zh/deploy/platforms/readme.html',
+helpLink: {
+en_US: 'https://docs.langbot.app/en/deploy/platforms/readme.html',
+zh_Hans: 'https://docs.langbot.app/zh/deploy/platforms/readme.html',
+},
 }),
 new SidebarChildVO({
 id: 'models',
@@ -39,7 +42,10 @@
 ),
 route: '/home/models',
 description: t('models.description'),
-helpLink: 'https://docs.langbot.app/zh/deploy/models/readme.html',
+helpLink: {
+en_US: 'https://docs.langbot.app/en/deploy/models/readme.html',
+zh_Hans: 'https://docs.langbot.app/zh/deploy/models/readme.html',
+},
 }),
 new SidebarChildVO({
 id: 'pipelines',
@@ -56,7 +62,10 @@
 ),
 route: '/home/pipelines',
 description: t('pipelines.description'),
-helpLink: 'https://docs.langbot.app/zh/deploy/pipelines/readme.html',
+helpLink: {
+en_US: 'https://docs.langbot.app/en/deploy/pipelines/readme.html',
+zh_Hans: 'https://docs.langbot.app/zh/deploy/pipelines/readme.html',
+},
 }),
 new SidebarChildVO({
 id: 'plugins',
@@ -73,6 +82,9 @@
 ),
 route: '/home/plugins',
 description: t('plugins.description'),
-helpLink: 'https://docs.langbot.app/zh/plugin/plugin-intro.html',
+helpLink: {
+en_US: 'https://docs.langbot.app/en/plugin/plugin-intro.html',
+zh_Hans: 'https://docs.langbot.app/zh/plugin/plugin-intro.html',
+},
 }),
 ];

View File

@@ -1,4 +1,6 @@
+import { i18nObj } from '@/i18n/I18nProvider';
 import styles from './HomeTittleBar.module.css';
+import { I18nLabel } from '@/app/infra/entities/common';
 export default function HomeTitleBar({
 title,
@@ -7,7 +9,7 @@ export default function HomeTitleBar({
 }: {
 title: string;
 subtitle: string;
-helpLink: string;
+helpLink: I18nLabel;
 }) {
 return (
 <div className={`${styles.titleBarContainer}`}>
@@ -15,7 +17,12 @@
 <div className={`${styles.subtitleText}`}>
 {subtitle}
 <span className={`${styles.helpLink}`}>
-<a href={helpLink} target="_blank" rel="noopener noreferrer">
+<div
+onClick={() => {
+window.open(i18nObj(helpLink), '_blank');
+}}
+className="cursor-pointer"
+>
 <svg
 className="w-[1rem] h-[1rem]"
 xmlns="http://www.w3.org/2000/svg"
@@ -24,7 +31,7 @@
 >
 <path d="M12 22C6.47715 22 2 17.5228 2 12C2 6.47715 6.47715 2 12 2C17.5228 2 22 6.47715 22 12C22 17.5228 17.5228 22 12 22ZM12 20C16.4183 20 20 16.4183 20 12C20 7.58172 16.4183 4 12 4C7.58172 4 4 7.58172 4 12C4 16.4183 7.58172 20 12 20ZM11 15H13V17H11V15ZM13 13.3551V14H11V12.5C11 11.9477 11.4477 11.5 12 11.5C12.8284 11.5 13.5 10.8284 13.5 10C13.5 9.17157 12.8284 8.5 12 8.5C11.2723 8.5 10.6656 9.01823 10.5288 9.70577L8.56731 9.31346C8.88637 7.70919 10.302 6.5 12 6.5C13.933 6.5 15.5 8.067 15.5 10C15.5 11.5855 14.4457 12.9248 13 13.3551Z"></path>
 </svg>
-</a>
+</div>
 </span>
 </div>
 </div>

View File

@@ -5,6 +5,7 @@ import HomeSidebar from '@/app/home/components/home-sidebar/HomeSidebar';
 import HomeTitleBar from '@/app/home/components/home-titlebar/HomeTitleBar';
 import React, { useState } from 'react';
 import { SidebarChildVO } from '@/app/home/components/home-sidebar/HomeSidebarChild';
+import { I18nLabel } from '@/app/infra/entities/common';
 export default function HomeLayout({
 children,
@@ -13,7 +14,10 @@
 }>) {
 const [title, setTitle] = useState<string>('');
 const [subtitle, setSubtitle] = useState<string>('');
-const [helpLink, setHelpLink] = useState<string>('');
+const [helpLink, setHelpLink] = useState<I18nLabel>({
+en_US: '',
+zh_Hans: '',
+});
 const onSelectedChangeAction = (child: SidebarChildVO) => {
 setTitle(child.name);
 setSubtitle(child.description);

View File

@@ -33,6 +33,7 @@
 display: flex;
 flex-direction: column;
 gap: 0.2rem;
+min-width: 0;
 width: 100%;
 }
@@ -118,3 +119,12 @@
 font-weight: 400;
 color: #2288ee;
 }
+.bigText {
+white-space: nowrap;
+overflow: hidden;
+text-overflow: ellipsis;
+font-size: 1.4rem;
+font-weight: bold;
+max-width: 100%;
+}

View File

@@ -103,7 +103,7 @@ export default function LLMForm({
 name: '',
 model_provider: '',
 url: '',
-api_key: '',
+api_key: 'sk-xxxxx',
 abilities: [],
 extra_args: [],
 },
@@ -130,6 +130,8 @@
 const [requesterDefaultURLList, setRequesterDefaultURLList] = useState<
 string[]
 >([]);
+const [modelTesting, setModelTesting] = useState(false);
+const [currentModelProvider, setCurrentModelProvider] = useState('');
 useEffect(() => {
 initLLMModelFormComponent().then(() => {
@@ -137,6 +139,7 @@
 getLLMConfig(initLLMId).then((val) => {
 form.setValue('name', val.name);
 form.setValue('model_provider', val.model_provider);
+setCurrentModelProvider(val.model_provider);
 form.setValue('url', val.url);
 form.setValue('api_key', val.api_key);
 form.setValue(
@@ -308,6 +311,34 @@
 }
 }
+function testLLMModelInForm() {
+setModelTesting(true);
+httpClient
+.testLLMModel('_', {
+uuid: '',
+name: form.getValues('name'),
+description: '',
+requester: form.getValues('model_provider'),
+requester_config: {
+base_url: form.getValues('url'),
+timeout: 120,
+},
+api_keys: [form.getValues('api_key')],
+abilities: form.getValues('abilities'),
+extra_args: form.getValues('extra_args'),
+})
+.then((res) => {
+console.log(res);
+toast.success(t('models.testSuccess'));
+})
+.catch(() => {
+toast.error(t('models.testError'));
+})
+.finally(() => {
+setModelTesting(false);
+});
+}
 return (
 <div>
 <Dialog
@@ -380,6 +411,7 @@
 <Select
 onValueChange={(value) => {
 field.onChange(value);
+setCurrentModelProvider(value);
 const index = requesterNameList.findIndex(
 (item) => item.value === value,
 );
@@ -426,22 +458,28 @@
 </FormItem>
 )}
 />
-<FormField
-control={form.control}
-name="api_key"
-render={({ field }) => (
-<FormItem>
-<FormLabel>
-{t('models.apiKey')}
-<span className="text-red-500">*</span>
-</FormLabel>
-<FormControl>
-<Input {...field} />
-</FormControl>
-<FormMessage />
-</FormItem>
-)}
-/>
+{!['lmstudio-chat-completions', 'ollama-chat'].includes(
+currentModelProvider,
+) && (
+<FormField
+control={form.control}
+name="api_key"
+render={({ field }) => (
+<FormItem>
+<FormLabel>
+{t('models.apiKey')}
+<span className="text-red-500">*</span>
+</FormLabel>
+<FormControl>
+<Input {...field} />
+</FormControl>
+<FormMessage />
+</FormItem>
+)}
+/>
+)}
 <FormField
 control={form.control}
 name="abilities"
@@ -579,6 +617,15 @@
 {editMode ? t('common.save') : t('common.submit')}
 </Button>
+<Button
+type="button"
+variant="outline"
+onClick={() => testLLMModelInForm()}
+disabled={modelTesting}
+>
+{t('common.test')}
+</Button>
 <Button
 type="button"
 variant="outline"

View File

@@ -23,6 +23,7 @@
 flex-direction: column;
 justify-content: space-between;
 gap: 0.4rem;
+min-width: 0;
 }
 .basicInfoNameContainer {
@@ -88,3 +89,12 @@
 font-weight: 400;
 color: #ffcd27;
 }
+.bigText {
+white-space: nowrap;
+overflow: hidden;
+text-overflow: ellipsis;
+font-size: 1.4rem;
+font-weight: bold;
+max-width: 100%;
+}

View File

@@ -1,5 +1,6 @@
 import { IDynamicFormItemSchema } from '@/app/infra/entities/form/dynamic';
 import { PipelineConfigTab } from '@/app/infra/entities/pipeline';
+import { I18nLabel } from '@/app/infra/entities/common';
 export interface ApiResponse<T> {
 code: number;
@@ -7,11 +8,6 @@ export interface ApiResponse<T> {
 msg: string;
 }
-export interface I18nText {
-en_US: string;
-zh_Hans: string;
-}
 export interface AsyncTaskCreatedResp {
 task_id: number;
 }
@@ -26,8 +22,8 @@ export interface ApiRespProviderRequester {
 export interface Requester {
 name: string;
-label: I18nText;
-description: I18nText;
+label: I18nLabel;
+description: I18nLabel;
 icon?: string;
 spec: {
 config: IDynamicFormItemSchema[];
@@ -84,8 +80,8 @@ export interface ApiRespPlatformAdapter {
 export interface Adapter {
 name: string;
-label: I18nText;
-description: I18nText;
+label: I18nLabel;
+description: I18nLabel;
 icon?: string;
 spec: {
 config: AdapterSpecConfig[];
@@ -94,7 +90,7 @@ export interface Adapter {
 export interface AdapterSpecConfig {
 default: string | number | boolean | Array<unknown>;
-label: I18nText;
+label: I18nLabel;
 name: string;
 required: boolean;
 type: string;
@@ -133,8 +129,8 @@ export interface ApiRespPlugin {
 export interface Plugin {
 author: string;
 name: string;
-description: I18nText;
-label: I18nText;
+description: I18nLabel;
+label: I18nLabel;
 version: string;
 enabled: boolean;
 priority: number;

View File

@@ -271,6 +271,10 @@ class HttpClient {
 return this.put(`/api/v1/provider/models/llm/${uuid}`, model);
 }
+public testLLMModel(uuid: string, model: LLMModel): Promise<object> {
+return this.post(`/api/v1/provider/models/llm/${uuid}/test`, model);
+}
 // ============ Pipeline API ============
 public getGeneralPipelineMetadata(): Promise<GetPipelineMetadataResponseData> {
 // as designed, this method will be deprecated, and only for developer to check the prefered config schema

View File

@@ -2,9 +2,11 @@
 import { useRouter } from 'next/navigation';
 import { Button } from '@/components/ui/button';
+import { useTranslation } from 'react-i18next';
 export default function NotFound() {
 const router = useRouter();
+const { t } = useTranslation();
 return (
 <div className="min-h-screen bg-white flex items-center justify-center">
@@ -18,11 +20,10 @@
 {/* 错误文本 */}
 <div className="text-center mb-8">
 <h1 className="text-2xl font-normal text-gray-800 mb-2">
+{t('notFound.title')}
 </h1>
 <p className="text-base text-gray-600 max-w-[450px] mx-auto mb-8">
-URL
+{t('notFound.description')}
 </p>
 </div>
@@ -33,26 +34,25 @@
 onClick={() => router.back()}
 className="h-9 px-4 cursor-pointer"
 >
+{t('notFound.back')}
 </Button>
 <Button
 variant="outline"
 onClick={() => router.push('/home')}
 className="h-9 px-4 cursor-pointer"
 >
+{t('notFound.home')}
 </Button>
 </div>
 {/* 帮助文档链接 */}
 <div className="text-center">
 <p className="text-sm text-gray-600">
 <a
 href="https://docs.langbot.app"
 className="text-black no-underline hover:underline"
 >
+{t('notFound.help')}
 </a>
 </p>
 </div>

View File

@@ -2,7 +2,7 @@
 import { ReactNode } from 'react';
 import '@/i18n';
-import { I18nText } from '@/app/infra/entities/api';
+import { I18nLabel } from '@/app/infra/entities/common';
 interface I18nProviderProps {
 children: ReactNode;
@@ -11,10 +11,10 @@
 export default function I18nProvider({ children }: I18nProviderProps) {
 return <>{children}</>;
 }
-export function i18nObj(i18nText: I18nText): string {
+export function i18nObj(i18nLabel: I18nLabel): string {
 const language = localStorage.getItem('langbot_language');
-if ((language === 'zh-Hans' && i18nText.zh_Hans) || !i18nText.en_US) {
-return i18nText.zh_Hans;
+if ((language === 'zh-Hans' && i18nLabel.zh_Hans) || !i18nLabel.en_US) {
+return i18nLabel.zh_Hans;
 }
-return i18nText.en_US;
+return i18nLabel.en_US;
 }

View File

@@ -37,6 +37,14 @@ const enUS = {
 deleteSuccess: 'Deleted successfully',
 deleteError: 'Delete failed: ',
 addRound: 'Add Round',
+test: 'Test',
+},
+notFound: {
+title: 'Page not found',
+description: 'The page you are looking for does not exist.',
+back: 'Back',
+home: 'Home',
+help: 'Get Help',
 },
 models: {
 title: 'Models',
@@ -82,6 +90,8 @@
 modelProviderDescription:
 'Please fill in the model name provided by the supplier',
 selectModel: 'Select Model',
+testSuccess: 'Test successful',
+testError: 'Test failed, please check your model configuration',
 },
 bots: {
 title: 'Bots',

View File

@@ -37,6 +37,15 @@ const zhHans = {
 deleteSuccess: '删除成功',
 deleteError: '删除失败:',
 addRound: '添加回合',
+test: '测试',
+},
+notFound: {
+title: '页面不存在',
+description:
+'您要查找的页面似乎不存在。请检查您输入的 URL 是否正确,或者返回首页。',
+back: '上一级',
+home: '返回主页',
+help: '查看帮助文档',
 },
 models: {
 title: '模型配置',
@@ -81,6 +90,8 @@
 selectModelProvider: '选择模型供应商',
 modelProviderDescription: '请填写供应商向您提供的模型名称',
 selectModel: '请选择模型',
+testSuccess: '测试成功',
+testError: '测试失败,请检查模型配置',
 },
 bots: {
 title: '机器人',