Compare commits

..

18 Commits

Author SHA1 Message Date
Junyan Qin
59bff61409 chore: release v4.0.3.2 2025-05-21 19:46:42 +08:00
Junyan Qin
778693a804 perf: desc of random 2025-05-21 19:45:45 +08:00
Junyan Qin
e5b2da225c perf: no longer get host ip 2025-05-21 19:42:04 +08:00
Steven Lynn
4a988b89a2 fix: update auto-reply probability description in trigger.yaml (#1463) 2025-05-21 17:50:23 +08:00
Junyan Qin
e5e8807312 perf: no longer ask for apikeys for ollama and lm studio 2025-05-20 16:01:20 +08:00
Junyan Qin
1376530c2e fix: conversation is null 2025-05-20 15:32:04 +08:00
Junyan Qin
7d34a2154b perf: unify i18n text class in frontend 2025-05-20 11:32:55 +08:00
Junyan Qin
ff335130ae chore: update CONTRIBUTING 2025-05-20 09:39:46 +08:00
Junyan Qin
0afef0ac0f chore: update pr template 2025-05-20 09:21:59 +08:00
Junyan Qin (Chin)
6447f270ea Update bug-report_en.yml 2025-05-20 09:16:30 +08:00
Junyan Qin (Chin)
81be62e1a4 Update bug-report_en.yml 2025-05-20 09:15:52 +08:00
Junyan Qin (Chin)
409909ccb1 Update bug-report_en.yml (#1456) 2025-05-20 09:14:52 +08:00
Junyan Qin
b821b69dbb chore: perf issue templates 2025-05-20 09:13:13 +08:00
Junyan Qin
7e2448655e chore: add english issue templates 2025-05-20 09:11:47 +08:00
Junyan Qin (Chin)
a7d2a68639 feat: add supports for testing llm models (#1454)
* feat: add supports for testing llm models

* fix: linter error
2025-05-19 23:10:04 +08:00
fdc310
aba51409a7 feat: add quote message processing and an option to enable this function (#1446)
* 更新了wechatpad接口,以及适配器

* 更新了wechatpad接口,以及适配器

* 修复一些细节问题,比如at回复,以及启动登录和启动ws长连接的线程同步

* importutil中修复了在wi上启动替换斜杠问题,login中加上了一个login,暂时没啥用。wechatpad中做出了一些细节修改

* 更新了wechatpad接口,以及适配器

* 增加了处理图片链接转换为 image_base64 发送

* feat(wechatpad): 调整日志+bugfix

* feat(wechatpad): fix typo

* 修正了发送语音api参数错误,添加了发送链接处理为base64数据(好像只有一部分链接可以)

* 修复了部分手抽的typo错误

* chore: remove manager.py

* feat: add quote message processing and an option to enable this function

* chore: add db migration for this change

---------

Co-authored-by: shinelin <shinelinxx@gmail.com>
Co-authored-by: Junyan Qin (Chin) <rockchinq@gmail.com>
2025-05-19 22:24:18 +08:00
sheetung
5e5d37cbf1 St/webui (#1452)
* 解决webUI模型配置页面卡片溢出问题

* fix: webUI卡片文本溢出问题
2025-05-19 18:11:50 +08:00
sheetung
e5a99a0fe4 解决webUI模型配置页面卡片溢出问题 (#1451) 2025-05-19 13:14:39 +08:00
31 changed files with 422 additions and 119 deletions

View File

@@ -1,5 +1,5 @@
name: 漏洞反馈
description: 报错或漏洞请使用这个模板创建不使用此模板创建的异常、漏洞相关issue将被直接关闭。由于自己操作不当/不甚了解所用技术栈引起的网络连接问题恕无法解决,请勿提 issue。容器间网络连接问题参考文档 https://docs.langbot.app/zh/workshop/network-details.html
description: 【供中文用户】报错或漏洞请使用这个模板创建不使用此模板创建的异常、漏洞相关issue将被直接关闭。由于自己操作不当/不甚了解所用技术栈引起的网络连接问题恕无法解决,请勿提 issue。容器间网络连接问题参考文档 https://docs.langbot.app/zh/workshop/network-details.html
title: "[Bug]: "
labels: ["bug?"]
body:
@@ -19,12 +19,12 @@ body:
- type: textarea
attributes:
label: 复现步骤
description: 如何重现这个问题,越详细越好;请贴上所有相关的配置文件和元数据文件(注意隐去敏感信息)
description: 如何重现这个问题,越详细越好;提供越多信息,我们会越快解决问题。
validations:
required: true
required: false
- type: textarea
attributes:
label: 启用的插件
description: 有些情况可能和插件功能有关,建议提供插件启用情况。可以使用`!plugin`命令查看已启用的插件
description: 有些情况可能和插件功能有关,建议提供插件启用情况。
validations:
required: false

View File

@@ -0,0 +1,30 @@
name: Bug report
description: Report bugs or vulnerabilities using this template. For container network connection issues, refer to the documentation https://docs.langbot.app/en/workshop/network-details.html
title: "[Bug]: "
labels: ["bug?"]
body:
- type: input
attributes:
label: Runtime environment
description: LangBot version, operating system, system architecture, **Python version**, **host location**
placeholder: "For example: v3.3.0, CentOS x64, Python 3.10.3; if deployed with Docker, just write Docker"
validations:
required: true
- type: textarea
attributes:
label: Exception
description: Describe the exception in detail, what happened and when it happened. **Please include log information.**
validations:
required: true
- type: textarea
attributes:
label: Reproduction steps
description: How to reproduce this problem, the more detailed the better; the more information you provide, the faster we will solve the problem.
validations:
required: false
- type: textarea
attributes:
label: Enabled plugins
description: Some cases may be related to plugin functionality, so please provide the plugin enablement status.
validations:
required: false

View File

@@ -1,7 +1,7 @@
name: 需求建议
title: "[Feature]: "
labels: ["改进"]
description: "新功能或现有功能优化请使用这个模板不符合类别的issue将被直接关闭"
labels: []
description: "【供中文用户】新功能或现有功能优化请使用这个模板不符合类别的issue将被直接关闭"
body:
- type: dropdown
attributes:

View File

@@ -0,0 +1,21 @@
name: Feature request
title: "[Feature]: "
labels: []
description: "New features or existing feature improvements should use this template; issues that do not match will be closed directly"
body:
- type: dropdown
attributes:
label: This is a?
description: New feature request or existing feature improvement
options:
- New feature
- Existing feature improvement
validations:
required: true
- type: textarea
attributes:
label: Detailed description
description: Detailed description, the more detailed the better
validations:
required: true

View File

@@ -1,7 +1,7 @@
name: 提交新插件
title: "[Plugin]: 请求登记新插件"
labels: ["独立插件"]
description: "本模板供且仅供提交新插件使用"
description: "【供中文用户】本模板供且仅供提交新插件使用"
body:
- type: input
attributes:

View File

@@ -0,0 +1,24 @@
name: Submit a new plugin
title: "[Plugin]: Request to register a new plugin"
labels: ["Independent Plugin"]
description: "This template is only for submitting new plugins"
body:
- type: input
attributes:
label: Plugin name
description: Fill in the name of the plugin
validations:
required: true
- type: textarea
attributes:
label: Plugin code repository address
description: Only GitHub is supported
validations:
required: true
- type: textarea
attributes:
label: Plugin description
description: The description of the plugin
validations:
required: true

View File

@@ -1,20 +1,21 @@
## 概述
## 概述 / Overview
实现/解决/优化的内容:
> 请在此部分填写你实现/解决/优化的内容:
> Summary of what you implemented/solved/optimized:
## 检查清单
## 检查清单 / Checklist
### PR 作者完成
### PR 作者完成 / For PR author
*请在方括号间写`x`以打勾
*请在方括号间写`x`以打勾 / Please tick the box with `x`*
- [ ] 阅读仓库[贡献指引](https://github.com/RockChinQ/LangBot/blob/master/CONTRIBUTING.md)了吗?
- [ ] 与项目所有者沟通过了吗?
- [ ] 我确定已自行测试所作的更改,确保功能符合预期。
- [ ] 阅读仓库[贡献指引](https://github.com/RockChinQ/LangBot/blob/master/CONTRIBUTING.md)了吗? / Have you read the [contribution guide](https://github.com/RockChinQ/LangBot/blob/master/CONTRIBUTING.md)?
- [ ] 与项目所有者沟通过了吗? / Have you communicated with the project maintainer?
- [ ] 我确定已自行测试所作的更改,确保功能符合预期。 / I have tested the changes and ensured they work as expected.
### 项目所有者完成
### 项目维护者完成 / For project maintainer
- [ ] 相关 issues 链接了吗?
- [ ] 配置项写好了吗?迁移写好了吗?生效了吗?
- [ ] 依赖加到 pyproject.toml 和 core/bootutils/deps.py 了吗
- [ ] 文档编写了吗?
- [ ] 相关 issues 链接了吗? / Have you linked the related issues?
- [ ] 配置项写好了吗?迁移写好了吗?生效了吗? / Have you written the configuration items? Have you written the migration? Has it taken effect?
- [ ] 依赖加到 pyproject.toml 和 core/bootutils/deps.py 了吗 / Have you added the dependencies to pyproject.toml and core/bootutils/deps.py?
- [ ] 文档编写了吗? / Have you written the documentation?

View File

@@ -5,22 +5,27 @@
### 贡献形式
- 提交PR解决issues中提到的bug或期待的功能
- 提交PR实现您设想的功能请先提出issue与者沟通)
- 优化代码架构,使各个模块的组织更加整洁优雅
- 在issues中提出发现的bug或者期待的功能
- 提交PR实现您设想的功能请先提出issue与项目维护者沟通)
- 为本项目在其他社交平台撰写文章、制作视频等
- 为本项目的衍生项目作出贡献,或开发插件增加功能
### 如何开始
### 沟通语言规范
- 加入本项目交流群,一同探讨项目相关事务
- 解决本项目或衍生项目的issues中亟待解决的问题
- 阅读并完善本项目文档
- 在各个社交媒体撰写本项目教程等
- 在 PR 和 Commit Message 中请使用全英文
- 对于中文用户issue 中可以使用中文
### 代码规范
<hr/>
- 代码中的注解`务必`符合Google风格的规范
- 模块顶部的引入代码请遵循`系统模块``第三方库模块``自定义模块`的顺序进行引入
- `不要`直接引入模块的特定属性,而是引入这个模块,再通过`xxx.yyy`的形式使用属性
- 任何作用域的字段`必须`先声明后使用,并在声明处注明类型提示
## Guidelines
### Contribution
- Submit PRs to solve bugs or features in the issues
- Submit PRs to implement your ideas (Please create an issue first and communicate with the project maintainer)
- Write articles or make videos about this project on other social platforms
- Contribute to the development of derivative projects, or develop plugins to add features
### Spoken Language
- Use English in PRs and Commit Messages
- For Chinese users, Chinese may be used in issues

View File

@@ -36,3 +36,11 @@ class LLMModelsRouterGroup(group.RouterGroup):
await self.ap.model_service.delete_llm_model(model_uuid)
return self.success()
@self.route('/<model_uuid>/test', methods=['POST'])
async def _(model_uuid: str) -> str:
json_data = await quart.request.json
await self.ap.model_service.test_llm_model(model_uuid, json_data)
return self.success()

View File

@@ -6,6 +6,8 @@ import sqlalchemy
from ....core import app
from ....entity.persistence import model as persistence_model
from ....entity.persistence import pipeline as persistence_pipeline
from ....provider.modelmgr import requester as model_requester
from ....provider import entities as llm_entities
class ModelsService:
@@ -78,3 +80,26 @@ class ModelsService:
)
await self.ap.model_mgr.remove_llm_model(model_uuid)
async def test_llm_model(self, model_uuid: str, model_data: dict) -> None:
runtime_llm_model: model_requester.RuntimeLLMModel | None = None
if model_uuid != '_':
for model in self.ap.model_mgr.llm_models:
if model.model_entity.uuid == model_uuid:
runtime_llm_model = model
break
if runtime_llm_model is None:
raise Exception('model not found')
else:
runtime_llm_model = await self.ap.model_mgr.init_runtime_llm_model(model_data)
await runtime_llm_model.requester.invoke_llm(
query=None,
model=runtime_llm_model,
messages=[llm_entities.Message(role='user', content='Hello, world!')],
funcs=[],
extra_args={},
)

View File

@@ -23,7 +23,7 @@ from ..api.http.service import model as model_service
from ..api.http.service import pipeline as pipeline_service
from ..api.http.service import bot as bot_service
from ..discover import engine as discover_engine
from ..utils import logcache, ip
from ..utils import logcache
from . import taskmgr
from . import entities as core_entities
@@ -166,23 +166,16 @@ class Application:
host_ip = '127.0.0.1'
public_ip = await ip.get_myip()
port = self.instance_config.data['api']['port']
tips = f"""
=======================================
您可通过以下方式访问管理面板
Access WebUI / 访问管理面板
🏠 本地地址:http://{host_ip}:{port}/
🌐 公网地址http://{public_ip}:{port}/
🏠 Local Address: http://{host_ip}:{port}/
🌐 Public Address: http://<Your Public IP>:{port}/
📌 如果您在容器中运行此程序,请确保容器的 {port} 端口已对外暴露
🔗 若要使用公网地址访问,请阅读以下须知
1. 公网地址仅供参考,请以您的主机公网 IP 为准;
2. 要使用公网地址访问,请确保您的主机具有公网 IP并且系统防火墙已放行 {port} 端口;
🤯 WebUI 仍处于 Beta 测试阶段,如有问题或建议请反馈到 https://github.com/RockChinQ/LangBot/issues
📌 Running this program in a container? Please ensure that the {port} port is exposed
=======================================
""".strip()
for line in tips.split('\n'):

View File

@@ -0,0 +1,36 @@
from .. import migration
import sqlalchemy
from ...entity.persistence import pipeline as persistence_pipeline
@migration.migration_class(2)
class DBMigrateCombineQuoteMsgConfig(migration.DBMigration):
"""引用消息合并配置"""
async def upgrade(self):
"""升级"""
# read all pipelines
pipelines = await self.ap.persistence_mgr.execute_async(sqlalchemy.select(persistence_pipeline.LegacyPipeline))
for pipeline in pipelines:
serialized_pipeline = self.ap.persistence_mgr.serialize_model(persistence_pipeline.LegacyPipeline, pipeline)
config = serialized_pipeline['config']
if 'misc' not in config['trigger']:
config['trigger']['misc'] = {}
if 'combine-quote-message' not in config['trigger']['misc']:
config['trigger']['misc']['combine-quote-message'] = False
await self.ap.persistence_mgr.execute_async(
sqlalchemy.update(persistence_pipeline.LegacyPipeline)
.where(persistence_pipeline.LegacyPipeline.uuid == serialized_pipeline['uuid'])
.values({'config': config, 'for_version': self.ap.ver_mgr.get_current_version()})
)
async def downgrade(self):
"""降级"""
pass

View File

@@ -34,6 +34,7 @@ class PreProcessor(stage.PipelineStage):
session = await self.ap.sess_mgr.get_session(query)
# 非 local-agent 时llm_model 为 None
llm_model = (
await self.ap.model_mgr.get_model_by_uuid(query.pipeline_config['ai']['local-agent']['model'])
@@ -81,6 +82,7 @@ class PreProcessor(stage.PipelineStage):
content_list = []
plain_text = ''
qoute_msg = query.pipeline_config["trigger"].get("misc",'').get("combine-quote-message")
for me in query.message_chain:
if isinstance(me, platform_message.Plain):
@@ -92,6 +94,18 @@ class PreProcessor(stage.PipelineStage):
):
if me.base64 is not None:
content_list.append(llm_entities.ContentElement.from_image_base64(me.base64))
elif isinstance(me, platform_message.Quote) and qoute_msg:
for msg in me.origin:
if isinstance(msg, platform_message.Plain):
content_list.append(llm_entities.ContentElement.from_text(msg.text))
elif isinstance(msg, platform_message.Image):
if selected_runner != 'local-agent' or query.use_llm_model.model_entity.abilities.__contains__(
'vision'
):
if msg.base64 is not None:
content_list.append(llm_entities.ContentElement.from_image_base64(msg.base64))
query.variables['user_message_text'] = plain_text

View File

@@ -15,6 +15,7 @@ from ...utils import image
class AiocqhttpMessageConverter(adapter.MessageConverter):
@staticmethod
async def yiri2target(
message_chain: platform_message.MessageChain,
@@ -66,14 +67,40 @@ class AiocqhttpMessageConverter(adapter.MessageConverter):
return msg_list, msg_id, msg_time
@staticmethod
async def target2yiri(message: str, message_id: int = -1):
async def target2yiri(message: str, message_id: int = -1,bot=None):
message = aiocqhttp.Message(message)
async def process_message_data(msg_data, reply_list):
if msg_data["type"] == "image":
image_base64, image_format = await image.qq_image_url_to_base64(msg_data["data"]['url'])
reply_list.append(
platform_message.Image(base64=f'data:image/{image_format};base64,{image_base64}'))
elif msg_data["type"] == "text":
reply_list.append(platform_message.Plain(text=msg_data["data"]["text"]))
elif msg_data["type"] == "forward": # 这里来应该传入转发消息组暂时传入qoute
for forward_msg_datas in msg_data["data"]["content"]:
for forward_msg_data in forward_msg_datas["message"]:
await process_message_data(forward_msg_data, reply_list)
elif msg_data["type"] == "at":
if msg_data["data"]['qq'] == 'all':
reply_list.append(platform_message.AtAll())
else:
reply_list.append(
platform_message.At(
target=msg_data["data"]['qq'],
)
)
yiri_msg_list = []
yiri_msg_list.append(platform_message.Source(id=message_id, time=datetime.datetime.now()))
for msg in message:
reply_list = []
if msg.type == 'at':
if msg.data['qq'] == 'all':
yiri_msg_list.append(platform_message.AtAll())
@@ -88,20 +115,46 @@ class AiocqhttpMessageConverter(adapter.MessageConverter):
elif msg.type == 'image':
image_base64, image_format = await image.qq_image_url_to_base64(msg.data['url'])
yiri_msg_list.append(platform_message.Image(base64=f'data:image/{image_format};base64,{image_base64}'))
elif msg.type == 'forward':
# 暂时不太合理
# msg_datas = await bot.get_msg(message_id=message_id)
# print(msg_datas)
# for msg_data in msg_datas["message"]:
# await process_message_data(msg_data, yiri_msg_list)
pass
elif msg.type == 'reply': # 此处处理引用消息传入Qoute
msg_datas = await bot.get_msg(message_id=msg.data["id"])
for msg_data in msg_datas["message"]:
await process_message_data(msg_data, reply_list)
reply_msg = platform_message.Quote(message_id=msg.data["id"],sender_id=msg_datas["user_id"],origin=reply_list)
yiri_msg_list.append(reply_msg)
chain = platform_message.MessageChain(yiri_msg_list)
return chain
class AiocqhttpEventConverter(adapter.EventConverter):
@staticmethod
async def yiri2target(event: platform_events.MessageEvent, bot_account_id: int):
return event.source_platform_object
@staticmethod
async def target2yiri(event: aiocqhttp.Event):
yiri_chain = await AiocqhttpMessageConverter.target2yiri(event.message, event.message_id)
async def target2yiri(event: aiocqhttp.Event,bot=None):
yiri_chain = await AiocqhttpMessageConverter.target2yiri(event.message, event.message_id,bot)
if event.message_type == 'group':
permission = 'MEMBER'
@@ -205,7 +258,7 @@ class AiocqhttpAdapter(adapter.MessagePlatformAdapter):
async def on_message(event: aiocqhttp.Event):
self.bot_account_id = event.self_id
try:
return await callback(await self.event_converter.target2yiri(event), self)
return await callback(await self.event_converter.target2yiri(event,self.bot), self)
except Exception:
traceback.print_exc()

View File

@@ -4,9 +4,6 @@ import sqlalchemy
from . import entities, requester
from ...core import app
from ...core import entities as core_entities
from .. import entities as llm_entities
from ..tools import entities as tools_entities
from ...discover import engine
from . import token
from ...entity.persistence import model as persistence_model
@@ -69,12 +66,11 @@ class ModelManager:
for llm_model in llm_models:
await self.load_llm_model(llm_model)
async def load_llm_model(
async def init_runtime_llm_model(
self,
model_info: persistence_model.LLMModel | sqlalchemy.Row[persistence_model.LLMModel] | dict,
):
"""加载模型"""
"""初始化运行时模型"""
if isinstance(model_info, sqlalchemy.Row):
model_info = persistence_model.LLMModel(**model_info._mapping)
elif isinstance(model_info, dict):
@@ -92,6 +88,15 @@ class ModelManager:
),
requester=requester_inst,
)
return runtime_llm_model
async def load_llm_model(
self,
model_info: persistence_model.LLMModel | sqlalchemy.Row[persistence_model.LLMModel] | dict,
):
"""加载模型"""
runtime_llm_model = await self.init_runtime_llm_model(model_info)
self.llm_models.append(runtime_llm_model)
async def get_model_by_name(self, name: str) -> entities.LLMModelInfo: # deprecated
@@ -132,12 +137,3 @@ class ModelManager:
if component.metadata.name == name:
return component
return None
async def invoke_llm(
self,
query: core_entities.Query,
model_uuid: str,
messages: list[llm_entities.Message],
funcs: list[tools_entities.LLMFunction] = None,
) -> llm_entities.Message:
pass

View File

@@ -93,6 +93,7 @@ class DifyServiceAPIRunner(runner.RequestRunner):
async def _chat_messages(self, query: core_entities.Query) -> typing.AsyncGenerator[llm_entities.Message, None]:
"""调用聊天助手"""
cov_id = query.session.using_conversation.uuid or ''
query.variables['conversation_id'] = cov_id
plain_text, image_ids = await self._preprocess_user_message(query)
@@ -155,6 +156,7 @@ class DifyServiceAPIRunner(runner.RequestRunner):
) -> typing.AsyncGenerator[llm_entities.Message, None]:
"""调用聊天助手"""
cov_id = query.session.using_conversation.uuid or ''
query.variables['conversation_id'] = cov_id
plain_text, image_ids = await self._preprocess_user_message(query)

View File

@@ -1,6 +1,6 @@
semantic_version = 'v4.0.3.1'
semantic_version = 'v4.0.3.2'
required_database_version = 1
required_database_version = 2
"""标记本版本所需要的数据库结构版本,用于判断数据库迁移"""
debug_mode = False

View File

@@ -1,10 +0,0 @@
import aiohttp
async def get_myip() -> str:
try:
async with aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=10)) as session:
async with session.get('https://ip.useragentinfo.com/myip') as response:
return await response.text()
except Exception:
return '0.0.0.0'

View File

@@ -46,8 +46,8 @@ stages:
en_US: Random
zh_Hans: 随机
description:
en_US: The probability of the random response, range from 0.0 to 1.0
zh_Hans: 随机响应概率范围0.0-1.0,对应 0% 到 100%
en_US: 'Probability of automatically responding to messages that are not matched by other rules. Range: 0.0-1.0 (0%-100%).'
zh_Hans: '自动响应其他规则未匹配的消息的概率范围0.0-1.0 (0%-100%)。'
type: float
required: false
default: 0
@@ -117,3 +117,18 @@ stages:
type: array[string]
required: true
default: []
- name: misc
label:
en_US: Misc
zh_Hans: 杂项
config:
- name: combine-quote-message
label:
en_US: Combine Quote Message
zh_Hans: 合并引用消息
description:
en_US: If enabled, the bot will combine the quote message with the user's message
zh_Hans: 如果启用,将合并引用消息与用户发送的消息
type: boolean
required: true
default: true

View File

@@ -33,6 +33,8 @@
display: flex;
flex-direction: column;
gap: 0.2rem;
min-width: 0;
width: 100%;
}
.basicInfoNameContainer {
@@ -43,12 +45,18 @@
.basicInfoName {
font-size: 1.4rem;
font-weight: 500;
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
}
.basicInfoDescription {
font-size: 1rem;
font-weight: 300;
color: #b1b1b1;
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
}
.basicInfoAdapterContainer {
@@ -88,3 +96,12 @@
font-weight: 500;
color: #626262;
}
.bigText {
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
font-size: 1.4rem;
font-weight: bold;
max-width: 100%;
}

View File

@@ -1,5 +1,5 @@
import styles from './HomeSidebar.module.css';
import { I18nText } from '@/app/infra/entities/api';
import { I18nLabel } from '@/app/infra/entities/common';
export interface ISidebarChildVO {
id: string;
@@ -7,7 +7,7 @@ export interface ISidebarChildVO {
name: string;
route: string;
description: string;
helpLink: I18nText;
helpLink: I18nLabel;
}
export class SidebarChildVO {
@@ -16,7 +16,7 @@ export class SidebarChildVO {
name: string;
route: string;
description: string;
helpLink: I18nText;
helpLink: I18nLabel;
constructor(props: ISidebarChildVO) {
this.id = props.id;

View File

@@ -1,6 +1,6 @@
import { i18nObj } from '@/i18n/I18nProvider';
import styles from './HomeTittleBar.module.css';
import { I18nText } from '@/app/infra/entities/api';
import { I18nLabel } from '@/app/infra/entities/common';
export default function HomeTitleBar({
title,
@@ -9,7 +9,7 @@ export default function HomeTitleBar({
}: {
title: string;
subtitle: string;
helpLink: I18nText;
helpLink: I18nLabel;
}) {
return (
<div className={`${styles.titleBarContainer}`}>

View File

@@ -5,7 +5,7 @@ import HomeSidebar from '@/app/home/components/home-sidebar/HomeSidebar';
import HomeTitleBar from '@/app/home/components/home-titlebar/HomeTitleBar';
import React, { useState } from 'react';
import { SidebarChildVO } from '@/app/home/components/home-sidebar/HomeSidebarChild';
import { I18nText } from '@/app/infra/entities/api';
import { I18nLabel } from '@/app/infra/entities/common';
export default function HomeLayout({
children,
@@ -14,7 +14,7 @@ export default function HomeLayout({
}>) {
const [title, setTitle] = useState<string>('');
const [subtitle, setSubtitle] = useState<string>('');
const [helpLink, setHelpLink] = useState<I18nText>({
const [helpLink, setHelpLink] = useState<I18nLabel>({
en_US: '',
zh_Hans: '',
});

View File

@@ -33,6 +33,7 @@
display: flex;
flex-direction: column;
gap: 0.2rem;
min-width: 0;
width: 100%;
}
@@ -118,3 +119,12 @@
font-weight: 400;
color: #2288ee;
}
.bigText {
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
font-size: 1.4rem;
font-weight: bold;
max-width: 100%;
}

View File

@@ -103,7 +103,7 @@ export default function LLMForm({
name: '',
model_provider: '',
url: '',
api_key: '',
api_key: 'sk-xxxxx',
abilities: [],
extra_args: [],
},
@@ -130,6 +130,8 @@ export default function LLMForm({
const [requesterDefaultURLList, setRequesterDefaultURLList] = useState<
string[]
>([]);
const [modelTesting, setModelTesting] = useState(false);
const [currentModelProvider, setCurrentModelProvider] = useState('');
useEffect(() => {
initLLMModelFormComponent().then(() => {
@@ -137,6 +139,7 @@ export default function LLMForm({
getLLMConfig(initLLMId).then((val) => {
form.setValue('name', val.name);
form.setValue('model_provider', val.model_provider);
setCurrentModelProvider(val.model_provider);
form.setValue('url', val.url);
form.setValue('api_key', val.api_key);
form.setValue(
@@ -308,6 +311,34 @@ export default function LLMForm({
}
}
function testLLMModelInForm() {
setModelTesting(true);
httpClient
.testLLMModel('_', {
uuid: '',
name: form.getValues('name'),
description: '',
requester: form.getValues('model_provider'),
requester_config: {
base_url: form.getValues('url'),
timeout: 120,
},
api_keys: [form.getValues('api_key')],
abilities: form.getValues('abilities'),
extra_args: form.getValues('extra_args'),
})
.then((res) => {
console.log(res);
toast.success(t('models.testSuccess'));
})
.catch(() => {
toast.error(t('models.testError'));
})
.finally(() => {
setModelTesting(false);
});
}
return (
<div>
<Dialog
@@ -380,6 +411,7 @@ export default function LLMForm({
<Select
onValueChange={(value) => {
field.onChange(value);
setCurrentModelProvider(value);
const index = requesterNameList.findIndex(
(item) => item.value === value,
);
@@ -426,22 +458,28 @@ export default function LLMForm({
</FormItem>
)}
/>
<FormField
control={form.control}
name="api_key"
render={({ field }) => (
<FormItem>
<FormLabel>
{t('models.apiKey')}
<span className="text-red-500">*</span>
</FormLabel>
<FormControl>
<Input {...field} />
</FormControl>
<FormMessage />
</FormItem>
)}
/>
{!['lmstudio-chat-completions', 'ollama-chat'].includes(
currentModelProvider,
) && (
<FormField
control={form.control}
name="api_key"
render={({ field }) => (
<FormItem>
<FormLabel>
{t('models.apiKey')}
<span className="text-red-500">*</span>
</FormLabel>
<FormControl>
<Input {...field} />
</FormControl>
<FormMessage />
</FormItem>
)}
/>
)}
<FormField
control={form.control}
name="abilities"
@@ -579,6 +617,15 @@ export default function LLMForm({
{editMode ? t('common.save') : t('common.submit')}
</Button>
<Button
type="button"
variant="outline"
onClick={() => testLLMModelInForm()}
disabled={modelTesting}
>
{t('common.test')}
</Button>
<Button
type="button"
variant="outline"

View File

@@ -23,6 +23,7 @@
flex-direction: column;
justify-content: space-between;
gap: 0.4rem;
min-width: 0;
}
.basicInfoNameContainer {
@@ -88,3 +89,12 @@
font-weight: 400;
color: #ffcd27;
}
.bigText {
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
font-size: 1.4rem;
font-weight: bold;
max-width: 100%;
}

View File

@@ -1,5 +1,6 @@
import { IDynamicFormItemSchema } from '@/app/infra/entities/form/dynamic';
import { PipelineConfigTab } from '@/app/infra/entities/pipeline';
import { I18nLabel } from '@/app/infra/entities/common';
export interface ApiResponse<T> {
code: number;
@@ -7,11 +8,6 @@ export interface ApiResponse<T> {
msg: string;
}
export interface I18nText {
en_US: string;
zh_Hans: string;
}
export interface AsyncTaskCreatedResp {
task_id: number;
}
@@ -26,8 +22,8 @@ export interface ApiRespProviderRequester {
export interface Requester {
name: string;
label: I18nText;
description: I18nText;
label: I18nLabel;
description: I18nLabel;
icon?: string;
spec: {
config: IDynamicFormItemSchema[];
@@ -84,8 +80,8 @@ export interface ApiRespPlatformAdapter {
export interface Adapter {
name: string;
label: I18nText;
description: I18nText;
label: I18nLabel;
description: I18nLabel;
icon?: string;
spec: {
config: AdapterSpecConfig[];
@@ -94,7 +90,7 @@ export interface Adapter {
export interface AdapterSpecConfig {
default: string | number | boolean | Array<unknown>;
label: I18nText;
label: I18nLabel;
name: string;
required: boolean;
type: string;
@@ -133,8 +129,8 @@ export interface ApiRespPlugin {
export interface Plugin {
author: string;
name: string;
description: I18nText;
label: I18nText;
description: I18nLabel;
label: I18nLabel;
version: string;
enabled: boolean;
priority: number;

View File

@@ -271,6 +271,10 @@ class HttpClient {
return this.put(`/api/v1/provider/models/llm/${uuid}`, model);
}
public testLLMModel(uuid: string, model: LLMModel): Promise<object> {
return this.post(`/api/v1/provider/models/llm/${uuid}/test`, model);
}
// ============ Pipeline API ============
public getGeneralPipelineMetadata(): Promise<GetPipelineMetadataResponseData> {
// as designed, this method will be deprecated, and only for developer to check the prefered config schema

View File

@@ -2,7 +2,7 @@
import { ReactNode } from 'react';
import '@/i18n';
import { I18nText } from '@/app/infra/entities/api';
import { I18nLabel } from '@/app/infra/entities/common';
interface I18nProviderProps {
children: ReactNode;
@@ -11,10 +11,10 @@ interface I18nProviderProps {
export default function I18nProvider({ children }: I18nProviderProps) {
return <>{children}</>;
}
export function i18nObj(i18nText: I18nText): string {
export function i18nObj(i18nLabel: I18nLabel): string {
const language = localStorage.getItem('langbot_language');
if ((language === 'zh-Hans' && i18nText.zh_Hans) || !i18nText.en_US) {
return i18nText.zh_Hans;
if ((language === 'zh-Hans' && i18nLabel.zh_Hans) || !i18nLabel.en_US) {
return i18nLabel.zh_Hans;
}
return i18nText.en_US;
return i18nLabel.en_US;
}

View File

@@ -37,6 +37,7 @@ const enUS = {
deleteSuccess: 'Deleted successfully',
deleteError: 'Delete failed: ',
addRound: 'Add Round',
test: 'Test',
},
notFound: {
title: 'Page not found',
@@ -89,6 +90,8 @@ const enUS = {
modelProviderDescription:
'Please fill in the model name provided by the supplier',
selectModel: 'Select Model',
testSuccess: 'Test successful',
testError: 'Test failed, please check your model configuration',
},
bots: {
title: 'Bots',

View File

@@ -37,6 +37,7 @@ const zhHans = {
deleteSuccess: '删除成功',
deleteError: '删除失败:',
addRound: '添加回合',
test: '测试',
},
notFound: {
title: '页面不存在',
@@ -89,6 +90,8 @@ const zhHans = {
selectModelProvider: '选择模型供应商',
modelProviderDescription: '请填写供应商向您提供的模型名称',
selectModel: '请选择模型',
testSuccess: '测试成功',
testError: '测试失败,请检查模型配置',
},
bots: {
title: '机器人',