Mirror of https://github.com/langbot-app/LangBot.git (synced 2025-11-25 11:29:39 +08:00)
Compare commits: 1 commit on branch feature/we (763c1a885c; a4995c8cd9)

.github/pull_request_template.md (vendored, 11 lines changed)
@@ -2,17 +2,6 @@

> 请在此部分填写你实现/解决/优化的内容:
> Summary of what you implemented/solved/optimized:
>

### 更改前后对比截图 / Screenshots

> 请在此部分粘贴更改前后对比截图(可以是界面截图、控制台输出、对话截图等):
> Please paste the screenshots of changes before and after here (can be interface screenshots, console output, conversation screenshots, etc.):
>
> 修改前 / Before:
>
> 修改后 / After:
>

## 检查清单 / Checklist
.github/workflows/build-docker-image.yml (vendored, 11 lines changed)

@@ -1,9 +1,10 @@
name: Build Docker Image

on:
  # Builds can only be triggered manually, to prevent forks from abusing this action
  workflow_dispatch:
  ## A build also runs automatically when a release is published
  release:
    types: [published]
  workflow_dispatch:

jobs:
  publish-docker-image:
    runs-on: ubuntu-latest

@@ -40,9 +41,5 @@ jobs:
        run: docker login --username=${{ secrets.DOCKER_USERNAME }} --password ${{ secrets.DOCKER_PASSWORD }}
      - name: Create Buildx
        run: docker buildx create --name mybuilder --use
      - name: Build for Release  # only release, exclude pre-release
        if: ${{ github.event.release.prerelease == false }}
        run: docker buildx build --platform linux/amd64 -t rockchin/langbot:${{ steps.check_version.outputs.version }} -t rockchin/langbot:latest . --push
      - name: Build for Pre-release  # no update for the latest tag
        if: ${{ github.event.release.prerelease == true }}
        run: docker buildx build --platform linux/amd64 -t rockchin/langbot:${{ steps.check_version.outputs.version }} . --push
      - name: Build  # image name: rockchin/langbot:<VERSION>
        run: docker buildx build --platform linux/arm64,linux/amd64 -t rockchin/langbot:${{ steps.check_version.outputs.version }} -t rockchin/langbot:latest . --push
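Because builds are gated behind `workflow_dispatch`, a maintainer can also queue one from the command line. A minimal sketch, assuming the GitHub CLI (`gh`) is installed and authenticated against this repository:

```bash
# Queue a manual run of the image build on the default branch
gh workflow run "Build Docker Image"

# Check the run that was just queued
gh run list --workflow "Build Docker Image" --limit 1
```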
.github/workflows/publish-to-pypi.yml (vendored, 46 lines changed)

@@ -1,46 +0,0 @@
name: Build and Publish to PyPI

on:
  workflow_dispatch:
  release:
    types: [published]

jobs:
  build-and-publish:
    runs-on: ubuntu-latest
    permissions:
      contents: read
      id-token: write  # Required for trusted publishing to PyPI

    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          persist-credentials: false

      - name: Set up Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '22'

      - name: Build frontend
        run: |
          cd web
          npm install -g pnpm
          pnpm install
          pnpm build
          mkdir -p ../src/langbot/web/out
          cp -r out ../src/langbot/web/

      - name: Install the latest version of uv
        uses: astral-sh/setup-uv@v6
        with:
          version: "latest"

      - name: Build package
        run: |
          uv build

      - name: Publish to PyPI
        run: |
          uv publish --token ${{ secrets.PYPI_TOKEN }}
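The same artifact can be reproduced locally before tagging a release. A sketch of the workflow's build steps run by hand, assuming Node.js 22, pnpm, and uv are available on the machine:

```bash
# Build the frontend and copy it into the Python package, as the workflow does
cd web
npm install -g pnpm
pnpm install
pnpm build
mkdir -p ../src/langbot/web/out
cp -r out ../src/langbot/web/
cd ..

# Build the wheel and sdist; they are written to ./dist/
uv build
```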
.github/workflows/test-dev-image.yaml (vendored, 108 lines changed)

@@ -1,108 +0,0 @@
name: Test Dev Image

on:
  workflow_run:
    workflows: ["Build Dev Image"]
    types:
      - completed
    branches:
      - master

jobs:
  test-dev-image:
    runs-on: ubuntu-latest
    # Only run if the build workflow succeeded
    if: ${{ github.event.workflow_run.conclusion == 'success' }}

    permissions:
      contents: read

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Update Docker Compose to use master tag
        working-directory: ./docker
        run: |
          # Replace 'latest' with 'master' tag for testing the dev image
          sed -i 's/rockchin\/langbot:latest/rockchin\/langbot:master/g' docker-compose.yaml
          echo "Updated docker-compose.yaml to use master tag:"
          cat docker-compose.yaml

      - name: Start Docker Compose
        working-directory: ./docker
        run: docker compose up -d

      - name: Wait and Test API
        run: |
          # Function to test API endpoint
          test_api() {
            echo "Testing API endpoint..."
            response=$(curl -s --connect-timeout 10 --max-time 30 -w "\n%{http_code}" http://localhost:5300/api/v1/system/info 2>&1)
            curl_exit_code=$?

            if [ $curl_exit_code -ne 0 ]; then
              echo "Curl failed with exit code: $curl_exit_code"
              echo "Error: $response"
              return 1
            fi

            http_code=$(echo "$response" | tail -n 1)
            response_body=$(echo "$response" | head -n -1)

            if [ "$http_code" = "200" ]; then
              echo "API is healthy! Response code: $http_code"
              echo "Response: $response_body"
              return 0
            else
              echo "API returned non-200 response: $http_code"
              echo "Response body: $response_body"
              return 1
            fi
          }

          # Wait 30 seconds before first attempt
          echo "Waiting 30 seconds for services to start..."
          sleep 30

          # Try up to 3 times with 30-second intervals
          max_attempts=3
          attempt=1

          while [ $attempt -le $max_attempts ]; do
            echo "Attempt $attempt of $max_attempts"

            if test_api; then
              echo "Success! API is responding correctly."
              exit 0
            fi

            if [ $attempt -lt $max_attempts ]; then
              echo "Retrying in 30 seconds..."
              sleep 30
            fi

            attempt=$((attempt + 1))
          done

          # All attempts failed
          echo "Failed to get healthy response after $max_attempts attempts"
          exit 1

      - name: Show Container Logs on Failure
        if: failure()
        working-directory: ./docker
        run: |
          echo "=== Docker Compose Status ==="
          docker compose ps
          echo ""
          echo "=== LangBot Logs ==="
          docker compose logs langbot
          echo ""
          echo "=== Plugin Runtime Logs ==="
          docker compose logs langbot_plugin_runtime

      - name: Cleanup
        if: always()
        working-directory: ./docker
        run: docker compose down
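The same smoke test can be run on a workstation. A condensed sketch of the workflow's steps, assuming a Linux host with GNU sed, Docker Compose v2, and the repository checked out:

```bash
cd docker
# Point the compose file at the dev image, as the workflow does
sed -i 's/rockchin\/langbot:latest/rockchin\/langbot:master/g' docker-compose.yaml
docker compose up -d

# Give the services time to start, then hit the same health endpoint
sleep 30
curl -i --connect-timeout 10 http://localhost:5300/api/v1/system/info   # expect HTTP 200

docker compose down
```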
.gitignore (vendored, 7 lines changed)

@@ -44,12 +44,5 @@ test.py
.venv/
uv.lock
/test
plugins.bak
coverage.xml
.coverage
src/langbot/web/

# Build artifacts
/dist
/build
*.egg-info
AGENTS.md (86 lines changed)

@@ -1,86 +0,0 @@
# AGENTS.md

This file guides code agents (such as Claude Code, GitHub Copilot, and OpenAI Codex) working in the LangBot project.

## Project Overview

LangBot is an open-source, LLM-native instant messaging bot development platform. It aims to provide an out-of-the-box IM bot development experience, ships with Agent, RAG, MCP, and other LLM application features, supports instant messaging platforms worldwide, and offers rich API interfaces for custom development.

LangBot has a comprehensive frontend; all operations can be performed through it. The project is split into these major parts:

- `./pkg`: The core Python package of the project backend.
- `./pkg/platform`: The platform module, containing message platform adapters, bot managers, message session managers, etc.
- `./pkg/provider`: The provider module, containing LLM providers, tool providers, etc.
- `./pkg/pipeline`: The pipeline module, containing pipelines, stages, the query pool, etc.
- `./pkg/api`: The API module, containing the HTTP API controllers and services.
- `./pkg/plugin`: The LangBot bridge for connecting with the plugin system.
- `./libs`: SDKs we previously developed for the project, such as `qq_official_api`, `wecom_api`, etc.
- `./templates`: Templates for config files, components, etc.
- `./web`: Frontend codebase, built with Next.js + **shadcn** + **Tailwind CSS**.
- `./docker`: docker-compose deployment files.

## Backend Development

We use `uv` to manage dependencies.

```bash
pip install uv
uv sync --dev
```

Start the backend and run the project in development mode.

```bash
uv run main.py
```

Then you can access the project at `http://127.0.0.1:5300`.

## Frontend Development

We use `pnpm` to manage dependencies.

```bash
cd web
cp .env.example .env
pnpm install
pnpm dev
```

Then you can access the project at `http://127.0.0.1:3000`.

## Plugin System Architecture

LangBot is composed of various internal components such as large language model tools, commands, messaging platform adapters, LLM requesters, and more. To meet extensibility and flexibility requirements, we have implemented a production-grade plugin system.

Each plugin runs in an independent process, managed uniformly by the Plugin Runtime. It has two operating modes: `stdio` and `websocket`. When LangBot is started directly by users (not running in a container), it uses `stdio` mode, which is common for personal users or lightweight environments. When LangBot runs in a container, it uses `websocket` mode, designed specifically for production environments.

The Plugin Runtime automatically starts each installed plugin and interacts with it through stdio. In plugin development scenarios, developers can use the `lbp` command-line tool to start plugins and connect to the running Runtime via WebSocket for debugging.

> The plugin SDK, CLI, Runtime, and entity definitions shared between LangBot and plugins are contained in the [`langbot-plugin-sdk`](https://github.com/langbot-app/langbot-plugin-sdk) repository.

## Some Development Tips and Standards

- LangBot is a global project: write all code comments in English and consider user experience in all aspects.
- Thus, you should consider i18n support in all aspects.
- LangBot is widely adopted in both toC and toB scenarios, so consider compatibility and security in all aspects.
- If you are asked to make a commit, follow this commit message format (an example follows this list):
  - format: `<type>(<scope>): <subject>`
  - type: must be a specific type, such as feat (new feature), fix (bug fix), docs (documentation), style (code style), refactor (refactoring), perf (performance optimization), etc.
  - scope: the scope of the commit, such as the package, file, function, class, or module name.
  - subject: the subject of the commit, such as a description of the change, its reason, or its impact.
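For instance, a commit fixing a reconnect bug in a platform adapter might be recorded as follows; the scope and subject are illustrative, not taken from the repository history:

```bash
git commit -m "fix(platform): reconnect websocket adapter after token refresh"
```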

## Some Principles

- Keep it simple, stupid.
- Entities should not be multiplied unnecessarily.
- The Eight Honors and Eight Shames (八荣八耻):

  Shame on blindly guessing APIs; honor in looking them up carefully.
  Shame on vague execution; honor in seeking confirmation.
  Shame on imagining business requirements; honor in having humans confirm them.
  Shame on inventing new interfaces; honor in reusing existing ones.
  Shame on skipping verification; honor in proactive testing.
  Shame on breaking the architecture; honor in following its conventions.
  Shame on pretending to understand; honor in honest ignorance.
  Shame on blind modification; honor in careful refactoring.
README.md (23 lines changed)

@@ -31,16 +31,6 @@ LangBot 是一个开源的大语言模型原生即时通信机器人开发平台

## 📦 Getting Started

#### Quick Deployment

Start it with a single `uvx` command ([uv](https://docs.astral.sh/uv/getting-started/installation/) must be installed first):

```bash
uvx langbot
```

Visit http://localhost:5300 to start using it.

#### Docker Compose Deployment

```bash

@@ -71,10 +61,6 @@ docker compose up -d

Run the release build directly; see the [manual deployment](https://docs.langbot.app/zh/deploy/langbot/manual.html) documentation.

#### Kubernetes Deployment

See the [Kubernetes deployment](./docker/README_K8S.md) documentation.

## 😎 Stay Updated

Click the Star and Watch buttons in the upper-right corner of the repository to get the latest updates.

@@ -126,7 +112,6 @@ docker compose up -d
| [胜算云](https://www.shengsuanyun.com/?from=CH_KYIPP758) | ✅ | All global LLMs available (friendly recommendation) |
| [优云智算](https://www.compshare.cn/?ytag=GPU_YY-gh_langbot) | ✅ | LLM and GPU resource platform |
| [PPIO](https://ppinfra.com/user/register?invited_by=QJKFYD&utm_source=github_langbot) | ✅ | LLM and GPU resource platform |
| [接口 AI](https://jiekou.ai/) | ✅ | LLM aggregation platform, focused on global model access |
| [302.AI](https://share.302.ai/SuTG99) | ✅ | LLM aggregation platform |
| [Google Gemini](https://aistudio.google.com/prompts/new_chat) | ✅ | |
| [Dify](https://dify.ai) | ✅ | LLMOps platform |

@@ -139,7 +124,7 @@ docker compose up -d
| [火山方舟](https://console.volcengine.com/ark/region:ark+cn-beijing/model?vendor=Bytedance&view=LIST_VIEW) | ✅ | LLM aggregation platform, LLMOps platform |
| [ModelScope](https://modelscope.cn/docs/model-service/API-Inference/intro) | ✅ | LLM aggregation platform |
| [MCP](https://modelcontextprotocol.io/) | ✅ | Tools can be fetched via the MCP protocol |
| [百宝箱Tbox](https://www.tbox.cn/open) | ✅ | Ant Group Tbox agent platform, 1 billion free LLM tokens per month |

### TTS

@@ -162,9 +147,3 @@ docker compose up -d
<a href="https://github.com/langbot-app/LangBot/graphs/contributors">
  <img src="https://contrib.rocks/image?repo=langbot-app/LangBot" />
</a>

<!--
## For Code Agents

If you're a code agent based on LLM, you should read [AGENTS.md](AGENTS.md) to understand the project structure and development process before you start any development.
-->
README_EN.md (15 lines changed)

@@ -25,16 +25,6 @@ LangBot is an open-source LLM native instant messaging robot development platfor

## 📦 Getting Started

#### Quick Start

Use `uvx` to start it with one command ([uv](https://docs.astral.sh/uv/getting-started/installation/) must be installed first):

```bash
uvx langbot
```

Visit http://localhost:5300 to start using it.

#### Docker Compose Deployment

```bash

@@ -65,10 +55,6 @@ Community contributed Zeabur template.

Run the released version directly; see the [Manual Deployment](https://docs.langbot.app/en/deploy/langbot/manual.html) documentation.

#### Kubernetes Deployment

Refer to the [Kubernetes Deployment](./docker/README_K8S.md) documentation.

## 😎 Stay Ahead

Click the Star and Watch buttons in the upper right corner of the repository to get the latest updates.

@@ -119,7 +105,6 @@ Or visit the demo environment: https://demo.langbot.dev/
| [CompShare](https://www.compshare.cn/?ytag=GPU_YY-gh_langbot) | ✅ | LLM and GPU resource platform |
| [Dify](https://dify.ai) | ✅ | LLMOps platform |
| [PPIO](https://ppinfra.com/user/register?invited_by=QJKFYD&utm_source=github_langbot) | ✅ | LLM and GPU resource platform |
| [接口 AI](https://jiekou.ai/) | ✅ | LLM aggregation platform, dedicated to global LLMs |
| [ShengSuanYun](https://www.shengsuanyun.com/?from=CH_KYIPP758) | ✅ | LLM and GPU resource platform |
| [302.AI](https://share.302.ai/SuTG99) | ✅ | LLM gateway (MaaS) |
| [Google Gemini](https://aistudio.google.com/prompts/new_chat) | ✅ | |
README_JP.md (15 lines changed)

@@ -25,16 +25,6 @@ LangBot は、エージェント、RAG、MCP などの LLM アプリケーショ

## 📦 Getting Started

#### Quick Start

Quick deployment with `uvx` (requires [uv](https://docs.astral.sh/uv/getting-started/installation/)):

```bash
uvx langbot
```

Visit http://localhost:5300 to start using it.

#### Docker Compose Deployment

```bash

@@ -65,10 +55,6 @@ LangBotはBTPanelにリストされています。BTPanelをインストール

Run the release version directly; see the [Manual Deployment](https://docs.langbot.app/en/deploy/langbot/manual.html) documentation.

#### Kubernetes Deployment

See the [Kubernetes Deployment](./docker/README_K8S.md) documentation.

## 😎 Stay Updated

Click the Star and Watch buttons in the upper right of the repository to get the latest updates.

@@ -118,7 +104,6 @@ LangBotはBTPanelにリストされています。BTPanelをインストール
| [Zhipu AI](https://open.bigmodel.cn/) | ✅ | |
| [CompShare](https://www.compshare.cn/?ytag=GPU_YY-gh_langbot) | ✅ | LLM and GPU resource platform |
| [PPIO](https://ppinfra.com/user/register?invited_by=QJKFYD&utm_source=github_langbot) | ✅ | LLM and GPU resource platform |
| [接口 AI](https://jiekou.ai/) | ✅ | LLM gateway (MaaS) |
| [ShengSuanYun](https://www.shengsuanyun.com/?from=CH_KYIPP758) | ✅ | LLM and GPU resource platform |
| [302.AI](https://share.302.ai/SuTG99) | ✅ | LLM gateway (MaaS) |
| [Google Gemini](https://aistudio.google.com/prompts/new_chat) | ✅ | |
README_TW.md (15 lines changed)

@@ -27,16 +27,6 @@ LangBot 是一個開源的大語言模型原生即時通訊機器人開發平台

## 📦 Getting Started

#### Quick Deployment

Start it with a single `uvx` command ([uv](https://docs.astral.sh/uv/getting-started/installation/) must be installed first):

```bash
uvx langbot
```

Visit http://localhost:5300 to start using it.

#### Docker Compose Deployment

```bash

@@ -67,10 +57,6 @@ docker compose up -d

Run the release build directly; see the [manual deployment](https://docs.langbot.app/zh/deploy/langbot/manual.html) documentation.

#### Kubernetes Deployment

See the [Kubernetes deployment](./docker/README_K8S.md) documentation.

## 😎 Stay Updated

Click the Star and Watch buttons in the upper-right corner of the repository to get the latest updates.

@@ -121,7 +107,6 @@ docker compose up -d
| [勝算雲](https://www.shengsuanyun.com/?from=CH_KYIPP758) | ✅ | LLM and GPU resource platform |
| [優雲智算](https://www.compshare.cn/?ytag=GPU_YY-gh_langbot) | ✅ | LLM and GPU resource platform |
| [PPIO](https://ppinfra.com/user/register?invited_by=QJKFYD&utm_source=github_langbot) | ✅ | LLM and GPU resource platform |
| [接口 AI](https://jiekou.ai/) | ✅ | LLM aggregation platform, focused on global model access |
| [302.AI](https://share.302.ai/SuTG99) | ✅ | LLM aggregation platform |
| [Google Gemini](https://aistudio.google.com/prompts/new_chat) | ✅ | |
| [Dify](https://dify.ai) | ✅ | LLMOps platform |
@@ -7,9 +7,16 @@ metadata:
    zh_Hans: 内置组件
spec:
  components:
    ComponentTemplate:
      fromFiles:
        - pkg/provider/modelmgr/requester.yaml
    MessagePlatformAdapter:
      fromDirs:
        - path: pkg/platform/sources/
    LLMAPIRequester:
      fromDirs:
        - path: pkg/provider/modelmgr/requesters/
    Plugin:
      fromDirs:
        - path: plugins/
          maxDepth: 2
@@ -1,629 +0,0 @@
# LangBot Kubernetes 部署指南 / Kubernetes Deployment Guide

[简体中文](#简体中文) | [English](#english)

---

## 简体中文

### 概述

本指南提供了在 Kubernetes 集群中部署 LangBot 的完整步骤。Kubernetes 部署配置基于 `docker-compose.yaml`,适用于生产环境的容器化部署。

### 前置要求

- Kubernetes 集群(版本 1.19+)
- `kubectl` 命令行工具已配置并可访问集群
- 集群中有可用的存储类(StorageClass)用于持久化存储(可选但推荐)
- 至少 2 vCPU 和 4GB RAM 的可用资源

### 架构说明

Kubernetes 部署包含以下组件:

1. **langbot**: 主应用服务
   - 提供 Web UI(端口 5300)
   - 处理平台 webhook(端口 2280-2290)
   - 数据持久化卷

2. **langbot-plugin-runtime**: 插件运行时服务
   - WebSocket 通信(端口 5400)
   - 插件数据持久化卷

3. **持久化存储**:
   - `langbot-data`: LangBot 主数据
   - `langbot-plugins`: 插件文件
   - `langbot-plugin-runtime-data`: 插件运行时数据

### 快速开始

#### 1. 下载部署文件

```bash
# 克隆仓库
git clone https://github.com/langbot-app/LangBot
cd LangBot/docker

# 或直接下载 kubernetes.yaml
wget https://raw.githubusercontent.com/langbot-app/LangBot/main/docker/kubernetes.yaml
```

#### 2. 部署到 Kubernetes

```bash
# 应用所有配置
kubectl apply -f kubernetes.yaml

# 检查部署状态
kubectl get all -n langbot

# 查看 Pod 日志
kubectl logs -n langbot -l app=langbot -f
```

#### 3. 访问 LangBot

默认情况下,LangBot 服务使用 ClusterIP 类型,只能在集群内部访问。您可以选择以下方式之一来访问:

**选项 A: 端口转发(推荐用于测试)**

```bash
kubectl port-forward -n langbot svc/langbot 5300:5300
```

然后访问 http://localhost:5300

**选项 B: NodePort(适用于开发环境)**

编辑 `kubernetes.yaml`,取消注释 NodePort Service 部分,然后:

```bash
kubectl apply -f kubernetes.yaml
# 获取节点 IP
kubectl get nodes -o wide
# 访问 http://<NODE_IP>:30300
```

**选项 C: LoadBalancer(适用于云环境)**

编辑 `kubernetes.yaml`,取消注释 LoadBalancer Service 部分,然后:

```bash
kubectl apply -f kubernetes.yaml
# 获取外部 IP
kubectl get svc -n langbot langbot-loadbalancer
# 访问 http://<EXTERNAL_IP>
```

**选项 D: Ingress(推荐用于生产环境)**

确保集群中已安装 Ingress Controller(如 nginx-ingress),然后:

1. 编辑 `kubernetes.yaml` 中的 Ingress 配置
2. 修改域名为您的实际域名
3. 应用配置:

```bash
kubectl apply -f kubernetes.yaml
# 访问 http://langbot.yourdomain.com
```

### 配置说明

#### 环境变量

在 `ConfigMap` 中配置环境变量:

```yaml
apiVersion: v1
kind: ConfigMap
metadata:
  name: langbot-config
  namespace: langbot
data:
  TZ: "Asia/Shanghai"  # 修改为您的时区
```

#### 存储配置

默认使用动态存储分配。如果您有特定的 StorageClass,请在 PVC 中指定:

```yaml
spec:
  storageClassName: your-storage-class-name
  accessModes:
    - ReadWriteOnce
  resources:
    requests:
      storage: 10Gi
```

#### 资源限制

根据您的需求调整资源限制:

```yaml
resources:
  requests:
    memory: "1Gi"
    cpu: "500m"
  limits:
    memory: "4Gi"
    cpu: "2000m"
```

### 常用操作

#### 查看日志

```bash
# 查看 LangBot 主服务日志
kubectl logs -n langbot -l app=langbot -f

# 查看插件运行时日志
kubectl logs -n langbot -l app=langbot-plugin-runtime -f
```

#### 重启服务

```bash
# 重启 LangBot
kubectl rollout restart deployment/langbot -n langbot

# 重启插件运行时
kubectl rollout restart deployment/langbot-plugin-runtime -n langbot
```

#### 更新镜像

```bash
# 更新到最新版本
kubectl set image deployment/langbot -n langbot langbot=rockchin/langbot:latest
kubectl set image deployment/langbot-plugin-runtime -n langbot langbot-plugin-runtime=rockchin/langbot:latest

# 检查更新状态
kubectl rollout status deployment/langbot -n langbot
```

#### 扩容(不推荐)

注意:由于 LangBot 使用 ReadWriteOnce 的持久化存储,不支持多副本扩容。如需高可用,请考虑使用 ReadWriteMany 存储或其他架构方案。

#### 备份数据

```bash
# 备份 PVC 数据
kubectl exec -n langbot -it <langbot-pod-name> -- tar czf /tmp/backup.tar.gz /app/data
kubectl cp langbot/<langbot-pod-name>:/tmp/backup.tar.gz ./backup.tar.gz
```

### 卸载

```bash
# 删除所有资源(保留 PVC)
kubectl delete deployment,service,configmap -n langbot --all

# 删除 PVC(会删除数据)
kubectl delete pvc -n langbot --all

# 删除命名空间
kubectl delete namespace langbot
```

### 故障排查

#### Pod 无法启动

```bash
# 查看 Pod 状态
kubectl get pods -n langbot

# 查看详细信息
kubectl describe pod -n langbot <pod-name>

# 查看事件
kubectl get events -n langbot --sort-by='.lastTimestamp'
```

#### 存储问题

```bash
# 检查 PVC 状态
kubectl get pvc -n langbot

# 检查 PV
kubectl get pv
```

#### 网络访问问题

```bash
# 检查 Service
kubectl get svc -n langbot

# 检查端口转发
kubectl port-forward -n langbot svc/langbot 5300:5300
```

### 生产环境建议

1. **使用特定版本标签**:避免使用 `latest` 标签,使用具体版本号如 `rockchin/langbot:v1.0.0`
2. **配置资源限制**:根据实际负载调整 CPU 和内存限制
3. **使用 Ingress + TLS**:配置 HTTPS 访问和证书管理
4. **配置监控和告警**:集成 Prometheus、Grafana 等监控工具
5. **定期备份**:配置自动备份策略保护数据
6. **使用专用 StorageClass**:为生产环境配置高性能存储
7. **配置亲和性规则**:确保 Pod 调度到合适的节点

### 高级配置

#### 使用 Secrets 管理敏感信息

如果需要配置 API 密钥等敏感信息:

```yaml
apiVersion: v1
kind: Secret
metadata:
  name: langbot-secrets
  namespace: langbot
type: Opaque
data:
  api_key: <base64-encoded-value>
```

然后在 Deployment 中引用:

```yaml
env:
  - name: API_KEY
    valueFrom:
      secretKeyRef:
        name: langbot-secrets
        key: api_key
```

#### 配置水平自动扩缩容(HPA)

注意:需要确保使用 ReadWriteMany 存储类型

```yaml
apiVersion: autoscaling/v2
kind: HorizontalPodAutoscaler
metadata:
  name: langbot-hpa
  namespace: langbot
spec:
  scaleTargetRef:
    apiVersion: apps/v1
    kind: Deployment
    name: langbot
  minReplicas: 1
  maxReplicas: 3
  metrics:
    - type: Resource
      resource:
        name: cpu
        target:
          type: Utilization
          averageUtilization: 70
```

### 参考资源

- [LangBot 官方文档](https://docs.langbot.app)
- [Docker 部署文档](https://docs.langbot.app/zh/deploy/langbot/docker.html)
- [Kubernetes 官方文档](https://kubernetes.io/docs/)

---

## English

### Overview

This guide provides complete steps for deploying LangBot in a Kubernetes cluster. The Kubernetes deployment configuration is based on `docker-compose.yaml` and is suitable for production containerized deployments.

### Prerequisites

- Kubernetes cluster (version 1.19+)
- `kubectl` command-line tool configured with cluster access
- Available StorageClass in the cluster for persistent storage (optional but recommended)
- At least 2 vCPU and 4GB RAM of available resources

### Architecture

The Kubernetes deployment includes the following components:

1. **langbot**: Main application service
   - Provides the Web UI (port 5300)
   - Handles platform webhooks (ports 2280-2290)
   - Data persistence volume

2. **langbot-plugin-runtime**: Plugin runtime service
   - WebSocket communication (port 5400)
   - Plugin data persistence volume

3. **Persistent Storage**:
   - `langbot-data`: LangBot main data
   - `langbot-plugins`: Plugin files
   - `langbot-plugin-runtime-data`: Plugin runtime data

### Quick Start

#### 1. Download Deployment Files

```bash
# Clone repository
git clone https://github.com/langbot-app/LangBot
cd LangBot/docker

# Or download kubernetes.yaml directly
wget https://raw.githubusercontent.com/langbot-app/LangBot/main/docker/kubernetes.yaml
```

#### 2. Deploy to Kubernetes

```bash
# Apply all configurations
kubectl apply -f kubernetes.yaml

# Check deployment status
kubectl get all -n langbot

# View Pod logs
kubectl logs -n langbot -l app=langbot -f
```

#### 3. Access LangBot

By default, the LangBot Service uses the ClusterIP type and is accessible only within the cluster. Choose one of the following methods to access it:

**Option A: Port Forwarding (Recommended for testing)**

```bash
kubectl port-forward -n langbot svc/langbot 5300:5300
```

Then visit http://localhost:5300

**Option B: NodePort (Suitable for development)**

Edit `kubernetes.yaml`, uncomment the NodePort Service section, then:

```bash
kubectl apply -f kubernetes.yaml
# Get node IP
kubectl get nodes -o wide
# Visit http://<NODE_IP>:30300
```

**Option C: LoadBalancer (Suitable for cloud environments)**

Edit `kubernetes.yaml`, uncomment the LoadBalancer Service section, then:

```bash
kubectl apply -f kubernetes.yaml
# Get external IP
kubectl get svc -n langbot langbot-loadbalancer
# Visit http://<EXTERNAL_IP>
```

**Option D: Ingress (Recommended for production)**

Ensure an Ingress Controller (e.g., nginx-ingress) is installed in the cluster, then:

1. Edit the Ingress configuration in `kubernetes.yaml`
2. Change the domain to your actual domain
3. Apply the configuration:

```bash
kubectl apply -f kubernetes.yaml
# Visit http://langbot.yourdomain.com
```
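Once the Ingress is applied, it can be verified before pointing DNS at it. A quick check, assuming the resource keeps the `langbot-ingress` name used in the commented manifest:

```bash
# Confirm the Ingress exists and has been assigned an address by the controller
kubectl get ingress -n langbot langbot-ingress
kubectl describe ingress -n langbot langbot-ingress
```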

### Configuration

#### Environment Variables

Configure environment variables in the ConfigMap:

```yaml
apiVersion: v1
kind: ConfigMap
metadata:
  name: langbot-config
  namespace: langbot
data:
  TZ: "Asia/Shanghai"  # Change to your timezone
```

#### Storage Configuration

Dynamic storage provisioning is used by default. If you have a specific StorageClass, specify it in the PVC:

```yaml
spec:
  storageClassName: your-storage-class-name
  accessModes:
    - ReadWriteOnce
  resources:
    requests:
      storage: 10Gi
```

#### Resource Limits

Adjust resource limits based on your needs:

```yaml
resources:
  requests:
    memory: "1Gi"
    cpu: "500m"
  limits:
    memory: "4Gi"
    cpu: "2000m"
```

### Common Operations

#### View Logs

```bash
# View LangBot main service logs
kubectl logs -n langbot -l app=langbot -f

# View plugin runtime logs
kubectl logs -n langbot -l app=langbot-plugin-runtime -f
```

#### Restart Services

```bash
# Restart LangBot
kubectl rollout restart deployment/langbot -n langbot

# Restart plugin runtime
kubectl rollout restart deployment/langbot-plugin-runtime -n langbot
```

#### Update Images

```bash
# Update to latest version
kubectl set image deployment/langbot -n langbot langbot=rockchin/langbot:latest
kubectl set image deployment/langbot-plugin-runtime -n langbot langbot-plugin-runtime=rockchin/langbot:latest

# Check update status
kubectl rollout status deployment/langbot -n langbot
```

#### Scaling (Not Recommended)

Note: Because LangBot uses ReadWriteOnce persistent storage, multi-replica scaling is not supported. For high availability, consider using ReadWriteMany storage or alternative architectures.

#### Backup Data

```bash
# Backup PVC data
kubectl exec -n langbot -it <langbot-pod-name> -- tar czf /tmp/backup.tar.gz /app/data
kubectl cp langbot/<langbot-pod-name>:/tmp/backup.tar.gz ./backup.tar.gz
```

### Uninstall

```bash
# Delete all resources (keep PVCs)
kubectl delete deployment,service,configmap -n langbot --all

# Delete PVCs (will delete data)
kubectl delete pvc -n langbot --all

# Delete namespace
kubectl delete namespace langbot
```

### Troubleshooting

#### Pods Not Starting

```bash
# Check Pod status
kubectl get pods -n langbot

# View detailed information
kubectl describe pod -n langbot <pod-name>

# View events
kubectl get events -n langbot --sort-by='.lastTimestamp'
```

#### Storage Issues

```bash
# Check PVC status
kubectl get pvc -n langbot

# Check PVs
kubectl get pv
```

#### Network Access Issues

```bash
# Check Services
kubectl get svc -n langbot

# Test port forwarding
kubectl port-forward -n langbot svc/langbot 5300:5300
```

### Production Recommendations

1. **Use specific version tags**: Avoid the `latest` tag; use a specific version such as `rockchin/langbot:v1.0.0`
2. **Configure resource limits**: Adjust CPU and memory limits based on actual load
3. **Use Ingress + TLS**: Configure HTTPS access and certificate management
4. **Configure monitoring and alerts**: Integrate monitoring tools such as Prometheus and Grafana
5. **Regular backups**: Configure an automated backup strategy to protect data (a sketch follows this list)
6. **Use a dedicated StorageClass**: Configure high-performance storage for production
7. **Configure affinity rules**: Ensure Pods are scheduled onto appropriate nodes
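Recommendation 5 can be automated from any machine with cluster access. A minimal sketch that wraps the backup commands shown earlier in a daily cron-driven script; the label lookup and output path are assumptions to adjust for your environment:

```bash
#!/usr/bin/env bash
# daily-langbot-backup.sh: run from a host with kubectl access, e.g. via cron "0 3 * * *"
set -e

# Resolve the current langbot pod name by its app label
POD=$(kubectl get pod -n langbot -l app=langbot -o jsonpath='{.items[0].metadata.name}')

# Archive /app/data inside the pod and copy it out with a date suffix
kubectl exec -n langbot "$POD" -- tar czf /tmp/backup.tar.gz /app/data
kubectl cp "langbot/$POD:/tmp/backup.tar.gz" "./langbot-backup-$(date +%F).tar.gz"
```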

### Advanced Configuration

#### Using Secrets for Sensitive Information

If you need to configure sensitive information like API keys:

```yaml
apiVersion: v1
kind: Secret
metadata:
  name: langbot-secrets
  namespace: langbot
type: Opaque
data:
  api_key: <base64-encoded-value>
```

Then reference in Deployment:

```yaml
env:
  - name: API_KEY
    valueFrom:
      secretKeyRef:
        name: langbot-secrets
        key: api_key
```

#### Configure Horizontal Pod Autoscaling (HPA)

Note: Requires ReadWriteMany storage type

```yaml
apiVersion: autoscaling/v2
kind: HorizontalPodAutoscaler
metadata:
  name: langbot-hpa
  namespace: langbot
spec:
  scaleTargetRef:
    apiVersion: apps/v1
    kind: Deployment
    name: langbot
  minReplicas: 1
  maxReplicas: 3
  metrics:
    - type: Resource
      resource:
        name: cpu
        target:
          type: Utilization
          averageUtilization: 70
```

### References

- [LangBot Official Documentation](https://docs.langbot.app)
- [Docker Deployment Guide](https://docs.langbot.app/zh/deploy/langbot/docker.html)
- [Kubernetes Official Documentation](https://kubernetes.io/docs/)
@@ -1,74 +0,0 @@
#!/bin/bash
# Quick test script for LangBot Kubernetes deployment
# This script helps you test the Kubernetes deployment locally

set -e

echo "🚀 LangBot Kubernetes Deployment Test Script"
echo "=============================================="
echo ""

# Check for kubectl
if ! command -v kubectl &> /dev/null; then
    echo "❌ kubectl is not installed. Please install kubectl first."
    echo "Visit: https://kubernetes.io/docs/tasks/tools/"
    exit 1
fi

echo "✓ kubectl is installed"

# Check if kubectl can connect to a cluster
if ! kubectl cluster-info &> /dev/null; then
    echo ""
    echo "⚠️  No Kubernetes cluster found."
    echo ""
    echo "To test locally, you can use:"
    echo "  - kind: https://kind.sigs.k8s.io/"
    echo "  - minikube: https://minikube.sigs.k8s.io/"
    echo "  - k3s: https://k3s.io/"
    echo ""
    echo "Example with kind:"
    echo "  kind create cluster --name langbot-test"
    echo ""
    exit 1
fi

echo "✓ Connected to Kubernetes cluster"
kubectl cluster-info
echo ""

# Ask user to confirm
read -p "Do you want to deploy LangBot to this cluster? (y/N) " -n 1 -r
echo
if [[ ! $REPLY =~ ^[Yy]$ ]]; then
    echo "Deployment cancelled."
    exit 0
fi

echo ""
echo "📦 Deploying LangBot..."
kubectl apply -f kubernetes.yaml

echo ""
echo "⏳ Waiting for pods to be ready..."
kubectl wait --for=condition=ready pod -l app=langbot -n langbot --timeout=300s
kubectl wait --for=condition=ready pod -l app=langbot-plugin-runtime -n langbot --timeout=300s

echo ""
echo "✅ Deployment complete!"
echo ""
echo "📊 Deployment status:"
kubectl get all -n langbot

echo ""
echo "🌐 To access LangBot Web UI, run:"
echo "  kubectl port-forward -n langbot svc/langbot 5300:5300"
echo ""
echo "Then visit: http://localhost:5300"
echo ""
echo "📝 To view logs:"
echo "  kubectl logs -n langbot -l app=langbot -f"
echo ""
echo "🗑️  To uninstall:"
echo "  kubectl delete namespace langbot"
echo ""
@@ -1,5 +1,3 @@
# Docker Compose configuration for LangBot
# For Kubernetes deployment, see kubernetes.yaml and README_K8S.md
version: "3"

services:

@@ -7,7 +5,6 @@ services:
  langbot_plugin_runtime:
    image: rockchin/langbot:latest
    container_name: langbot_plugin_runtime
    platform: linux/amd64  # For Apple Silicon compatibility
    volumes:
      - ./data/plugins:/app/data/plugins
    ports:

@@ -22,7 +19,6 @@ services:
  langbot:
    image: rockchin/langbot:latest
    container_name: langbot
    platform: linux/amd64  # For Apple Silicon compatibility
    volumes:
      - ./data:/app/data
      - ./plugins:/app/plugins
@@ -1,400 +0,0 @@
# Kubernetes Deployment for LangBot
# This file provides Kubernetes deployment manifests for LangBot based on docker-compose.yaml
#
# Usage:
#   kubectl apply -f kubernetes.yaml
#
# Prerequisites:
# - A Kubernetes cluster (1.19+)
# - kubectl configured to communicate with your cluster
# - (Optional) A StorageClass for dynamic volume provisioning
#
# Components:
# - Namespace: langbot
# - PersistentVolumeClaims for data persistence
# - Deployments for langbot and langbot_plugin_runtime
# - Services for network access
# - ConfigMap for timezone configuration

---
# Namespace
apiVersion: v1
kind: Namespace
metadata:
  name: langbot
  labels:
    app: langbot

---
# PersistentVolumeClaim for LangBot data
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
  name: langbot-data
  namespace: langbot
spec:
  accessModes:
    - ReadWriteOnce
  resources:
    requests:
      storage: 10Gi
  # Uncomment and modify if you have a specific StorageClass
  # storageClassName: your-storage-class

---
# PersistentVolumeClaim for LangBot plugins
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
  name: langbot-plugins
  namespace: langbot
spec:
  accessModes:
    - ReadWriteOnce
  resources:
    requests:
      storage: 5Gi
  # Uncomment and modify if you have a specific StorageClass
  # storageClassName: your-storage-class

---
# PersistentVolumeClaim for Plugin Runtime data
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
  name: langbot-plugin-runtime-data
  namespace: langbot
spec:
  accessModes:
    - ReadWriteOnce
  resources:
    requests:
      storage: 5Gi
  # Uncomment and modify if you have a specific StorageClass
  # storageClassName: your-storage-class

---
# ConfigMap for environment configuration
apiVersion: v1
kind: ConfigMap
metadata:
  name: langbot-config
  namespace: langbot
data:
  TZ: "Asia/Shanghai"
  PLUGIN__RUNTIME_WS_URL: "ws://langbot-plugin-runtime:5400/control/ws"

---
# Deployment for LangBot Plugin Runtime
apiVersion: apps/v1
kind: Deployment
metadata:
  name: langbot-plugin-runtime
  namespace: langbot
  labels:
    app: langbot-plugin-runtime
spec:
  replicas: 1
  selector:
    matchLabels:
      app: langbot-plugin-runtime
  template:
    metadata:
      labels:
        app: langbot-plugin-runtime
    spec:
      containers:
        - name: langbot-plugin-runtime
          image: rockchin/langbot:latest
          imagePullPolicy: Always
          command: ["uv", "run", "-m", "langbot_plugin.cli.__init__", "rt"]
          ports:
            - containerPort: 5400
              name: runtime
              protocol: TCP
          env:
            - name: TZ
              valueFrom:
                configMapKeyRef:
                  name: langbot-config
                  key: TZ
          volumeMounts:
            - name: plugin-data
              mountPath: /app/data/plugins
          resources:
            requests:
              memory: "512Mi"
              cpu: "250m"
            limits:
              memory: "2Gi"
              cpu: "1000m"
          # Liveness probe to restart container if it becomes unresponsive
          livenessProbe:
            tcpSocket:
              port: 5400
            initialDelaySeconds: 30
            periodSeconds: 10
            timeoutSeconds: 5
            failureThreshold: 3
          # Readiness probe to know when container is ready to accept traffic
          readinessProbe:
            tcpSocket:
              port: 5400
            initialDelaySeconds: 10
            periodSeconds: 5
            timeoutSeconds: 3
            failureThreshold: 3
      volumes:
        - name: plugin-data
          persistentVolumeClaim:
            claimName: langbot-plugin-runtime-data
      restartPolicy: Always

---
# Service for LangBot Plugin Runtime
apiVersion: v1
kind: Service
metadata:
  name: langbot-plugin-runtime
  namespace: langbot
  labels:
    app: langbot-plugin-runtime
spec:
  type: ClusterIP
  selector:
    app: langbot-plugin-runtime
  ports:
    - port: 5400
      targetPort: 5400
      protocol: TCP
      name: runtime

---
# Deployment for LangBot
apiVersion: apps/v1
kind: Deployment
metadata:
  name: langbot
  namespace: langbot
  labels:
    app: langbot
spec:
  replicas: 1
  selector:
    matchLabels:
      app: langbot
  template:
    metadata:
      labels:
        app: langbot
    spec:
      containers:
        - name: langbot
          image: rockchin/langbot:latest
          imagePullPolicy: Always
          ports:
            - containerPort: 5300
              name: web
              protocol: TCP
            - containerPort: 2280
              name: webhook-start
              protocol: TCP
            # Note: Kubernetes doesn't support port ranges directly in container ports
            # The webhook ports 2280-2290 are available, but we only expose the start of the range
            # If you need all ports exposed, consider using a Service with multiple port definitions
          env:
            - name: TZ
              valueFrom:
                configMapKeyRef:
                  name: langbot-config
                  key: TZ
            - name: PLUGIN__RUNTIME_WS_URL
              valueFrom:
                configMapKeyRef:
                  name: langbot-config
                  key: PLUGIN__RUNTIME_WS_URL
          volumeMounts:
            - name: data
              mountPath: /app/data
            - name: plugins
              mountPath: /app/plugins
          resources:
            requests:
              memory: "1Gi"
              cpu: "500m"
            limits:
              memory: "4Gi"
              cpu: "2000m"
          # Liveness probe to restart container if it becomes unresponsive
          livenessProbe:
            httpGet:
              path: /
              port: 5300
            initialDelaySeconds: 60
            periodSeconds: 10
            timeoutSeconds: 5
            failureThreshold: 3
          # Readiness probe to know when container is ready to accept traffic
          readinessProbe:
            httpGet:
              path: /
              port: 5300
            initialDelaySeconds: 30
            periodSeconds: 5
            timeoutSeconds: 3
            failureThreshold: 3
      volumes:
        - name: data
          persistentVolumeClaim:
            claimName: langbot-data
        - name: plugins
          persistentVolumeClaim:
            claimName: langbot-plugins
      restartPolicy: Always

---
# Service for LangBot (ClusterIP for internal access)
apiVersion: v1
kind: Service
metadata:
  name: langbot
  namespace: langbot
  labels:
    app: langbot
spec:
  type: ClusterIP
  selector:
    app: langbot
  ports:
    - port: 5300
      targetPort: 5300
      protocol: TCP
      name: web
    - port: 2280
      targetPort: 2280
      protocol: TCP
      name: webhook-2280
    - port: 2281
      targetPort: 2281
      protocol: TCP
      name: webhook-2281
    - port: 2282
      targetPort: 2282
      protocol: TCP
      name: webhook-2282
    - port: 2283
      targetPort: 2283
      protocol: TCP
      name: webhook-2283
    - port: 2284
      targetPort: 2284
      protocol: TCP
      name: webhook-2284
    - port: 2285
      targetPort: 2285
      protocol: TCP
      name: webhook-2285
    - port: 2286
      targetPort: 2286
      protocol: TCP
      name: webhook-2286
    - port: 2287
      targetPort: 2287
      protocol: TCP
      name: webhook-2287
    - port: 2288
      targetPort: 2288
      protocol: TCP
      name: webhook-2288
    - port: 2289
      targetPort: 2289
      protocol: TCP
      name: webhook-2289
    - port: 2290
      targetPort: 2290
      protocol: TCP
      name: webhook-2290

---
# Ingress for external access (Optional - requires Ingress Controller)
# Uncomment and modify the following section if you want to expose LangBot via Ingress
# apiVersion: networking.k8s.io/v1
# kind: Ingress
# metadata:
#   name: langbot-ingress
#   namespace: langbot
#   annotations:
#     # Uncomment and modify based on your ingress controller
#     # nginx.ingress.kubernetes.io/rewrite-target: /
#     # cert-manager.io/cluster-issuer: letsencrypt-prod
# spec:
#   ingressClassName: nginx  # Change based on your ingress controller
#   rules:
#     - host: langbot.yourdomain.com  # Change to your domain
#       http:
#         paths:
#           - path: /
#             pathType: Prefix
#             backend:
#               service:
#                 name: langbot
#                 port:
#                   number: 5300
#   # Uncomment for TLS/HTTPS
#   # tls:
#   #   - hosts:
#   #       - langbot.yourdomain.com
#   #     secretName: langbot-tls

---
# Service for LangBot with LoadBalancer (Alternative to Ingress)
# Uncomment the following if you want to expose LangBot directly via LoadBalancer
# This is useful in cloud environments (AWS, GCP, Azure, etc.)
# apiVersion: v1
# kind: Service
# metadata:
#   name: langbot-loadbalancer
#   namespace: langbot
#   labels:
#     app: langbot
# spec:
#   type: LoadBalancer
#   selector:
#     app: langbot
#   ports:
#     - port: 80
#       targetPort: 5300
#       protocol: TCP
#       name: web
#     - port: 2280
#       targetPort: 2280
#       protocol: TCP
#       name: webhook-start
#     # Add more webhook ports as needed

---
# Service for LangBot with NodePort (Alternative for exposing service)
# Uncomment if you want to expose LangBot via NodePort
# This is useful for testing or when LoadBalancer is not available
# apiVersion: v1
# kind: Service
# metadata:
#   name: langbot-nodeport
#   namespace: langbot
#   labels:
#     app: langbot
# spec:
#   type: NodePort
#   selector:
#     app: langbot
#   ports:
#     - port: 5300
#       targetPort: 5300
#       nodePort: 30300  # Must be in range 30000-32767
#       protocol: TCP
#       name: web
#     - port: 2280
#       targetPort: 2280
#       nodePort: 30280  # Must be in range 30000-32767
#       protocol: TCP
#       name: webhook
@@ -1,291 +0,0 @@
# API Key Authentication

LangBot now supports API key authentication for external systems to access its HTTP service API.

## Managing API Keys

API keys can be managed through the web interface:

1. Log in to the LangBot web interface
2. Click the "API Keys" button at the bottom of the sidebar
3. Create, view, copy, or delete API keys as needed

## Using API Keys

### Authentication Headers

Include your API key in the request header using one of these methods:

**Method 1: X-API-Key header (Recommended)**
```
X-API-Key: lbk_your_api_key_here
```

**Method 2: Authorization Bearer token**
```
Authorization: Bearer lbk_your_api_key_here
```

## Available APIs

All existing LangBot APIs now support **both user token and API key authentication**. This means you can use API keys to access:

- **Model Management** - `/api/v1/provider/models/llm` and `/api/v1/provider/models/embedding`
- **Bot Management** - `/api/v1/platform/bots`
- **Pipeline Management** - `/api/v1/pipelines`
- **Knowledge Base** - `/api/v1/knowledge/*`
- **MCP Servers** - `/api/v1/mcp/servers`
- And more...

### Authentication Methods

Each endpoint accepts **either** of the following (a request sketch follows this list):
1. **User Token** (via `Authorization: Bearer <user_jwt_token>`) - for web UI and authenticated users
2. **API Key** (via `X-API-Key` or `Authorization: Bearer <api_key>`) - for external services
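On the wire, the two accepted credential styles look like this. A short sketch against the placeholder host used elsewhere in this document; the JWT value is hypothetical:

```bash
# External service authenticating with an API key
curl -H "X-API-Key: lbk_your_api_key_here" \
  http://your-langbot-server:5300/api/v1/pipelines

# Web-UI style authentication with a user JWT passed as a Bearer token
curl -H "Authorization: Bearer <user_jwt_token>" \
  http://your-langbot-server:5300/api/v1/pipelines
```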

## Example: Model Management

### List All LLM Models

```http
GET /api/v1/provider/models/llm
X-API-Key: lbk_your_api_key_here
```

Response:
```json
{
  "code": 0,
  "msg": "ok",
  "data": {
    "models": [
      {
        "uuid": "model-uuid",
        "name": "GPT-4",
        "description": "OpenAI GPT-4 model",
        "requester": "openai-chat-completions",
        "requester_config": {...},
        "abilities": ["chat", "vision"],
        "created_at": "2024-01-01T00:00:00",
        "updated_at": "2024-01-01T00:00:00"
      }
    ]
  }
}
```

### Create a New LLM Model

```http
POST /api/v1/provider/models/llm
X-API-Key: lbk_your_api_key_here
Content-Type: application/json

{
  "name": "My Custom Model",
  "description": "Description of the model",
  "requester": "openai-chat-completions",
  "requester_config": {
    "model": "gpt-4",
    "args": {}
  },
  "api_keys": [
    {
      "name": "default",
      "keys": ["sk-..."]
    }
  ],
  "abilities": ["chat"],
  "extra_args": {}
}
```

### Update an LLM Model

```http
PUT /api/v1/provider/models/llm/{model_uuid}
X-API-Key: lbk_your_api_key_here
Content-Type: application/json

{
  "name": "Updated Model Name",
  "description": "Updated description",
  ...
}
```

### Delete an LLM Model

```http
DELETE /api/v1/provider/models/llm/{model_uuid}
X-API-Key: lbk_your_api_key_here
```

## Example: Bot Management

### List All Bots

```http
GET /api/v1/platform/bots
X-API-Key: lbk_your_api_key_here
```

### Create a New Bot

```http
POST /api/v1/platform/bots
X-API-Key: lbk_your_api_key_here
Content-Type: application/json

{
  "name": "My Bot",
  "adapter": "telegram",
  "config": {...}
}
```

## Example: Pipeline Management

### List All Pipelines

```http
GET /api/v1/pipelines
X-API-Key: lbk_your_api_key_here
```

### Create a New Pipeline

```http
POST /api/v1/pipelines
X-API-Key: lbk_your_api_key_here
Content-Type: application/json

{
  "name": "My Pipeline",
  "config": {...}
}
```

## Error Responses

### 401 Unauthorized

```json
{
  "code": -1,
  "msg": "No valid authentication provided (user token or API key required)"
}
```

or

```json
{
  "code": -1,
  "msg": "Invalid API key"
}
```

### 404 Not Found

```json
{
  "code": -1,
  "msg": "Resource not found"
}
```

### 500 Internal Server Error

```json
{
  "code": -2,
  "msg": "Error message details"
}
```

## Security Best Practices

1. **Keep API keys secure**: Store them securely and never commit them to version control
2. **Use HTTPS**: Always use HTTPS in production to encrypt API key transmission
3. **Rotate keys regularly**: Create new API keys periodically and delete old ones
4. **Use descriptive names**: Give your API keys meaningful names to track their usage
5. **Delete unused keys**: Remove API keys that are no longer needed
6. **Use X-API-Key header**: Prefer using the `X-API-Key` header for clarity

## Example: Python Client

```python
import requests

API_KEY = "lbk_your_api_key_here"
BASE_URL = "http://your-langbot-server:5300"

headers = {
    "X-API-Key": API_KEY,
    "Content-Type": "application/json"
}

# List all models
response = requests.get(f"{BASE_URL}/api/v1/provider/models/llm", headers=headers)
models = response.json()["data"]["models"]

print(f"Found {len(models)} models")
for model in models:
    print(f"- {model['name']}: {model['description']}")

# Create a new bot
bot_data = {
    "name": "My Telegram Bot",
    "adapter": "telegram",
    "config": {
        "token": "your-telegram-token"
    }
}

response = requests.post(
    f"{BASE_URL}/api/v1/platform/bots",
    headers=headers,
    json=bot_data
)

if response.status_code == 200:
    bot_uuid = response.json()["data"]["uuid"]
    print(f"Bot created with UUID: {bot_uuid}")
```

## Example: cURL

```bash
# List all models
curl -X GET \
  -H "X-API-Key: lbk_your_api_key_here" \
  http://your-langbot-server:5300/api/v1/provider/models/llm

# Create a new pipeline
curl -X POST \
  -H "X-API-Key: lbk_your_api_key_here" \
  -H "Content-Type: application/json" \
  -d '{
    "name": "My Pipeline",
    "config": {...}
  }' \
  http://your-langbot-server:5300/api/v1/pipelines

# Get bot logs
curl -X POST \
  -H "X-API-Key: lbk_your_api_key_here" \
  -H "Content-Type: application/json" \
  -d '{
    "from_index": -1,
    "max_count": 10
  }' \
  http://your-langbot-server:5300/api/v1/platform/bots/{bot_uuid}/logs
```

## Notes

- The same endpoints work for both the web UI (with user tokens) and external services (with API keys)
- No need to learn different API paths - use the existing API documentation with API key authentication
- All endpoints that previously required user authentication now also accept API keys
@@ -1,117 +0,0 @@
|
||||
# LangBot PyPI Package Installation
|
||||
|
||||
## Quick Start with uvx
|
||||
|
||||
The easiest way to run LangBot is using `uvx` (recommended for quick testing):
|
||||
|
||||
```bash
|
||||
uvx langbot
|
||||
```
|
||||
|
||||
This will automatically download and run the latest version of LangBot.
|
||||
|
||||
## Install with pip/uv
|
||||
|
||||
You can also install LangBot as a regular Python package:
|
||||
|
||||
```bash
|
||||
# Using pip
|
||||
pip install langbot
|
||||
|
||||
# Using uv
|
||||
uv pip install langbot
|
||||
```
|
||||
|
||||
Then run it:
|
||||
|
||||
```bash
|
||||
langbot
|
||||
```
|
||||
|
||||
Or using Python module syntax:
|
||||
|
||||
```bash
|
||||
python -m langbot
|
||||
```
|
||||
|
||||
## Installation with Frontend
|
||||
|
||||
When published to PyPI, the LangBot package includes the pre-built frontend files. You don't need to build the frontend separately.
|
||||
|
||||
## Data Directory
|
||||
|
||||
When running LangBot as a package, it will create a `data/` directory in your current working directory to store configuration, logs, and other runtime data. You can run LangBot from any directory, and it will set up its data directory there.
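
A minimal sketch of what this amounts to (an illustration only, not LangBot's actual bootstrap code):

```python
from pathlib import Path

# The packaged entry point works relative to wherever you launch it.
data_dir = Path.cwd() / "data"
data_dir.mkdir(exist_ok=True)  # created on first run if it does not exist
print(f"Configuration and logs will live in {data_dir}")
```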

## Command Line Options

LangBot supports the following command line options:

- `--standalone-runtime`: Use standalone plugin runtime
- `--debug`: Enable debug mode

Example:

```bash
langbot --debug
```

## Comparison with Other Installation Methods

### PyPI Package (uvx/pip)
- **Pros**: Easy to install and update, no need to clone repository or build frontend
- **Cons**: Less flexible for development/customization

### Docker
- **Pros**: Isolated environment, easy deployment
- **Cons**: Requires Docker

### Manual Source Installation
- **Pros**: Full control, easy to customize and develop
- **Cons**: Requires building frontend, managing dependencies manually

## Development

If you want to contribute or customize LangBot, you should still use the manual installation method by cloning the repository:

```bash
git clone https://github.com/langbot-app/LangBot
cd LangBot
uv sync
cd web
npm install
npm run build
cd ..
uv run main.py
```

## Updating

To update to the latest version:

```bash
# With pip
pip install --upgrade langbot

# With uv
uv pip install --upgrade langbot

# With uvx (automatically uses latest)
uvx langbot
```

## System Requirements

- Python 3.10.1 or higher
- Operating System: Linux, macOS, or Windows

## Differences from Source Installation

When running LangBot from the PyPI package (via uvx or pip), there are a few behavioral differences compared to running from source:

1. **Version Check**: The package version does not prompt for user input when the Python version is incompatible. It simply prints an error message and exits, which keeps it compatible with non-interactive environments such as containers and CI/CD (a rough sketch follows this list).

2. **Working Directory**: The package version does not require being run from the LangBot project root. You can run `langbot` from any directory, and it will create a `data/` directory in your current working directory.

3. **Frontend Files**: The frontend is pre-built and included in the package, so you don't need to run `npm run build` separately.
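
For the first difference, the packaged behavior is roughly the following sketch; the source checkout's `main.py` (shown later in this diff) calls `input()` before exiting instead.

```python
import sys

if sys.version_info < (3, 10, 1):
    print("LangBot requires Python 3.10.1 or newer; found", sys.version)
    sys.exit(1)  # exit immediately so containers and CI pipelines never block on input()
```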

These differences are intentional to make the package more user-friendly and suitable for various deployment scenarios.

File diff suppressed because it is too large
@@ -7,8 +7,10 @@ import os
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
|
||||
|
||||
class AsyncCozeAPIClient:
|
||||
def __init__(self, api_key: str, api_base: str = 'https://api.coze.cn'):
|
||||
def __init__(self, api_key: str, api_base: str = "https://api.coze.cn"):
|
||||
self.api_key = api_key
|
||||
self.api_base = api_base
|
||||
self.session = None
|
||||
@@ -22,11 +24,13 @@ class AsyncCozeAPIClient:
|
||||
"""退出时自动关闭会话"""
|
||||
await self.close()
|
||||
|
||||
|
||||
|
||||
async def coze_session(self):
|
||||
"""确保HTTP session存在"""
|
||||
if self.session is None:
|
||||
connector = aiohttp.TCPConnector(
|
||||
ssl=False if self.api_base.startswith('http://') else True,
|
||||
ssl=False if self.api_base.startswith("http://") else True,
|
||||
limit=100,
|
||||
limit_per_host=30,
|
||||
keepalive_timeout=30,
|
||||
@@ -38,10 +42,12 @@ class AsyncCozeAPIClient:
|
||||
sock_read=120,
|
||||
)
|
||||
headers = {
|
||||
'Authorization': f'Bearer {self.api_key}',
|
||||
'Accept': 'text/event-stream',
|
||||
"Authorization": f"Bearer {self.api_key}",
|
||||
"Accept": "text/event-stream",
|
||||
}
|
||||
self.session = aiohttp.ClientSession(headers=headers, timeout=timeout, connector=connector)
|
||||
self.session = aiohttp.ClientSession(
|
||||
headers=headers, timeout=timeout, connector=connector
|
||||
)
|
||||
return self.session
|
||||
|
||||
async def close(self):
|
||||
@@ -57,15 +63,15 @@ class AsyncCozeAPIClient:
|
||||
# 处理 Path 对象
|
||||
if isinstance(file, Path):
|
||||
if not file.exists():
|
||||
raise ValueError(f'File not found: {file}')
|
||||
with open(file, 'rb') as f:
|
||||
raise ValueError(f"File not found: {file}")
|
||||
with open(file, "rb") as f:
|
||||
file = f.read()
|
||||
|
||||
# 处理文件路径字符串
|
||||
elif isinstance(file, str):
|
||||
if not os.path.isfile(file):
|
||||
raise ValueError(f'File not found: {file}')
|
||||
with open(file, 'rb') as f:
|
||||
raise ValueError(f"File not found: {file}")
|
||||
with open(file, "rb") as f:
|
||||
file = f.read()
|
||||
|
||||
# 处理文件对象
|
||||
@@ -73,39 +79,43 @@ class AsyncCozeAPIClient:
|
||||
file = file.read()
|
||||
|
||||
session = await self.coze_session()
|
||||
url = f'{self.api_base}/v1/files/upload'
|
||||
url = f"{self.api_base}/v1/files/upload"
|
||||
|
||||
try:
|
||||
file_io = io.BytesIO(file)
|
||||
async with session.post(
|
||||
url,
|
||||
data={
|
||||
'file': file_io,
|
||||
"file": file_io,
|
||||
},
|
||||
timeout=aiohttp.ClientTimeout(total=60),
|
||||
) as response:
|
||||
if response.status == 401:
|
||||
raise Exception('Coze API 认证失败,请检查 API Key 是否正确')
|
||||
raise Exception("Coze API 认证失败,请检查 API Key 是否正确")
|
||||
|
||||
response_text = await response.text()
|
||||
|
||||
|
||||
if response.status != 200:
|
||||
raise Exception(f'文件上传失败,状态码: {response.status}, 响应: {response_text}')
|
||||
raise Exception(
|
||||
f"文件上传失败,状态码: {response.status}, 响应: {response_text}"
|
||||
)
|
||||
try:
|
||||
result = await response.json()
|
||||
except json.JSONDecodeError:
|
||||
raise Exception(f'文件上传响应解析失败: {response_text}')
|
||||
raise Exception(f"文件上传响应解析失败: {response_text}")
|
||||
|
||||
if result.get('code') != 0:
|
||||
raise Exception(f'文件上传失败: {result.get("msg", "未知错误")}')
|
||||
if result.get("code") != 0:
|
||||
raise Exception(f"文件上传失败: {result.get('msg', '未知错误')}")
|
||||
|
||||
file_id = result['data']['id']
|
||||
file_id = result["data"]["id"]
|
||||
return file_id
|
||||
|
||||
except asyncio.TimeoutError:
|
||||
raise Exception('文件上传超时')
|
||||
raise Exception("文件上传超时")
|
||||
except Exception as e:
|
||||
raise Exception(f'文件上传失败: {str(e)}')
|
||||
raise Exception(f"文件上传失败: {str(e)}")
|
||||
|
||||
|
||||
async def chat_messages(
|
||||
self,
|
||||
@@ -129,21 +139,22 @@ class AsyncCozeAPIClient:
|
||||
timeout: 超时时间
|
||||
"""
|
||||
session = await self.coze_session()
|
||||
url = f'{self.api_base}/v3/chat'
|
||||
url = f"{self.api_base}/v3/chat"
|
||||
|
||||
payload = {
|
||||
'bot_id': bot_id,
|
||||
'user_id': user_id,
|
||||
'stream': stream,
|
||||
'auto_save_history': auto_save_history,
|
||||
"bot_id": bot_id,
|
||||
"user_id": user_id,
|
||||
"stream": stream,
|
||||
"auto_save_history": auto_save_history,
|
||||
}
|
||||
|
||||
if additional_messages:
|
||||
payload['additional_messages'] = additional_messages
|
||||
payload["additional_messages"] = additional_messages
|
||||
|
||||
params = {}
|
||||
if conversation_id:
|
||||
params['conversation_id'] = conversation_id
|
||||
params["conversation_id"] = conversation_id
|
||||
|
||||
|
||||
try:
|
||||
async with session.post(
|
||||
@@ -153,25 +164,29 @@ class AsyncCozeAPIClient:
|
||||
timeout=aiohttp.ClientTimeout(total=timeout),
|
||||
) as response:
|
||||
if response.status == 401:
|
||||
raise Exception('Coze API 认证失败,请检查 API Key 是否正确')
|
||||
raise Exception("Coze API 认证失败,请检查 API Key 是否正确")
|
||||
|
||||
if response.status != 200:
|
||||
raise Exception(f'Coze API 流式请求失败,状态码: {response.status}')
|
||||
raise Exception(f"Coze API 流式请求失败,状态码: {response.status}")
|
||||
|
||||
|
||||
async for chunk in response.content:
|
||||
chunk = chunk.decode('utf-8')
|
||||
chunk = chunk.decode("utf-8")
|
||||
if chunk != '\n':
|
||||
if chunk.startswith('event:'):
|
||||
chunk_type = chunk.replace('event:', '', 1).strip()
|
||||
elif chunk.startswith('data:'):
|
||||
chunk_data = chunk.replace('data:', '', 1).strip()
|
||||
if chunk.startswith("event:"):
|
||||
chunk_type = chunk.replace("event:", "", 1).strip()
|
||||
elif chunk.startswith("data:"):
|
||||
chunk_data = chunk.replace("data:", "", 1).strip()
|
||||
else:
|
||||
yield {
|
||||
'event': chunk_type,
|
||||
'data': json.loads(chunk_data) if chunk_data else {},
|
||||
} # 处理本地部署时,接口返回的data为空值
|
||||
yield {"event": chunk_type, "data": json.loads(chunk_data)}
|
||||
|
||||
except asyncio.TimeoutError:
|
||||
raise Exception(f'Coze API 流式请求超时 ({timeout}秒)')
|
||||
raise Exception(f"Coze API 流式请求超时 ({timeout}秒)")
|
||||
except Exception as e:
|
||||
raise Exception(f'Coze API 流式请求失败: {str(e)}')
|
||||
raise Exception(f"Coze API 流式请求失败: {str(e)}")
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
45
libs/dify_service_api/test.py
Normal file
@@ -0,0 +1,45 @@
|
||||
from v1 import client # type: ignore
|
||||
|
||||
import asyncio
|
||||
|
||||
import os
|
||||
import json
|
||||
|
||||
|
||||
class TestDifyClient:
|
||||
async def test_chat_messages(self):
|
||||
cln = client.AsyncDifyServiceClient(api_key=os.getenv('DIFY_API_KEY'), base_url=os.getenv('DIFY_BASE_URL'))
|
||||
|
||||
async for chunk in cln.chat_messages(inputs={}, query='调用工具查看现在几点?', user='test'):
|
||||
print(json.dumps(chunk, ensure_ascii=False, indent=4))
|
||||
|
||||
async def test_upload_file(self):
|
||||
cln = client.AsyncDifyServiceClient(api_key=os.getenv('DIFY_API_KEY'), base_url=os.getenv('DIFY_BASE_URL'))
|
||||
|
||||
file_bytes = open('img.png', 'rb').read()
|
||||
|
||||
print(type(file_bytes))
|
||||
|
||||
file = ('img2.png', file_bytes, 'image/png')
|
||||
|
||||
resp = await cln.upload_file(file=file, user='test')
|
||||
print(json.dumps(resp, ensure_ascii=False, indent=4))
|
||||
|
||||
async def test_workflow_run(self):
|
||||
cln = client.AsyncDifyServiceClient(api_key=os.getenv('DIFY_API_KEY'), base_url=os.getenv('DIFY_BASE_URL'))
|
||||
|
||||
# resp = await cln.workflow_run(inputs={}, user="test")
|
||||
# # print(json.dumps(resp, ensure_ascii=False, indent=4))
|
||||
# print(resp)
|
||||
chunks = []
|
||||
|
||||
ignored_events = ['text_chunk']
|
||||
async for chunk in cln.workflow_run(inputs={}, user='test'):
|
||||
if chunk['event'] in ignored_events:
|
||||
continue
|
||||
chunks.append(chunk)
|
||||
print(json.dumps(chunks, ensure_ascii=False, indent=4))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
asyncio.run(TestDifyClient().test_chat_messages())
|
||||
@@ -5,8 +5,6 @@ import typing
|
||||
import json
|
||||
|
||||
from .errors import DifyAPIError
|
||||
from pathlib import Path
|
||||
import os
|
||||
|
||||
|
||||
class AsyncDifyServiceClient:
|
||||
@@ -111,23 +109,7 @@ class AsyncDifyServiceClient:
|
||||
user: str,
|
||||
timeout: float = 30.0,
|
||||
) -> str:
|
||||
# 处理 Path 对象
|
||||
if isinstance(file, Path):
|
||||
if not file.exists():
|
||||
raise ValueError(f'File not found: {file}')
|
||||
with open(file, 'rb') as f:
|
||||
file = f.read()
|
||||
|
||||
# 处理文件路径字符串
|
||||
elif isinstance(file, str):
|
||||
if not os.path.isfile(file):
|
||||
raise ValueError(f'File not found: {file}')
|
||||
with open(file, 'rb') as f:
|
||||
file = f.read()
|
||||
|
||||
# 处理文件对象
|
||||
elif hasattr(file, 'read'):
|
||||
file = file.read()
|
||||
"""上传文件"""
|
||||
async with httpx.AsyncClient(
|
||||
base_url=self.base_url,
|
||||
trust_env=True,
|
||||
@@ -139,8 +121,6 @@ class AsyncDifyServiceClient:
|
||||
headers={'Authorization': f'Bearer {self.api_key}'},
|
||||
files={
|
||||
'file': file,
|
||||
},
|
||||
data={
|
||||
'user': (None, user),
|
||||
},
|
||||
)
|
||||
@@ -188,69 +188,12 @@ class DingTalkClient:
|
||||
|
||||
if incoming_message.message_type == 'richText':
|
||||
data = incoming_message.rich_text_content.to_dict()
|
||||
|
||||
# 使用统一的结构化数据格式,保持顺序
|
||||
rich_content = {
|
||||
'Type': 'richText',
|
||||
'Elements': [], # 按顺序存储所有元素
|
||||
'SimpleContent': '', # 兼容字段:纯文本内容
|
||||
'SimplePicture': '', # 兼容字段:第一张图片
|
||||
}
|
||||
|
||||
# 先收集所有文本和图片占位符
|
||||
text_elements = []
|
||||
|
||||
# 解析富文本内容,保持原始顺序
|
||||
for item in data['richText']:
|
||||
# 处理文本内容
|
||||
if 'text' in item and item['text'] != '\n':
|
||||
element = {'Type': 'text', 'Content': item['text']}
|
||||
rich_content['Elements'].append(element)
|
||||
text_elements.append(item['text'])
|
||||
|
||||
# 检查是否是图片元素 - 根据钉钉API的实际结构调整
|
||||
# 钉钉富文本中的图片通常有特定标识,可能需要根据实际返回调整
|
||||
elif item.get('type') == 'picture':
|
||||
# 创建图片占位符
|
||||
element = {
|
||||
'Type': 'image_placeholder',
|
||||
}
|
||||
rich_content['Elements'].append(element)
|
||||
|
||||
# 获取并下载所有图片
|
||||
image_list = incoming_message.get_image_list()
|
||||
if image_list:
|
||||
new_elements = []
|
||||
image_index = 0
|
||||
|
||||
for element in rich_content['Elements']:
|
||||
if element['Type'] == 'image_placeholder':
|
||||
if image_index < len(image_list) and image_list[image_index]:
|
||||
image_url = await self.download_image(image_list[image_index])
|
||||
new_elements.append({'Type': 'image', 'Picture': image_url})
|
||||
image_index += 1
|
||||
else:
|
||||
# 如果没有对应的图片,保留占位符或跳过
|
||||
continue
|
||||
else:
|
||||
new_elements.append(element)
|
||||
|
||||
rich_content['Elements'] = new_elements
|
||||
|
||||
# 设置兼容字段
|
||||
all_texts = [elem['Content'] for elem in rich_content['Elements'] if elem.get('Type') == 'text']
|
||||
rich_content['SimpleContent'] = '\n'.join(all_texts) if all_texts else ''
|
||||
|
||||
all_images = [elem['Picture'] for elem in rich_content['Elements'] if elem.get('Type') == 'image']
|
||||
if all_images:
|
||||
rich_content['SimplePicture'] = all_images[0]
|
||||
rich_content['AllImages'] = all_images # 所有图片的列表
|
||||
|
||||
# 设置原始的 content 和 picture 字段以保持兼容
|
||||
message_data['Content'] = rich_content['SimpleContent']
|
||||
message_data['Rich_Content'] = rich_content
|
||||
if all_images:
|
||||
message_data['Picture'] = all_images[0]
|
||||
if 'text' in item:
|
||||
message_data['Content'] = item['text']
|
||||
if incoming_message.get_image_list()[0]:
|
||||
message_data['Picture'] = await self.download_image(incoming_message.get_image_list()[0])
|
||||
message_data['Type'] = 'text'
|
||||
|
||||
elif incoming_message.message_type == 'text':
|
||||
message_data['Content'] = incoming_message.get_text_list()[0]
|
||||
@@ -15,10 +15,6 @@ class DingTalkEvent(dict):
|
||||
def content(self):
|
||||
return self.get('Content', '')
|
||||
|
||||
@property
|
||||
def rich_content(self):
|
||||
return self.get('Rich_Content', '')
|
||||
|
||||
@property
|
||||
def incoming_message(self) -> Optional['dingtalk_stream.chatbot.ChatbotMessage']:
|
||||
return self.get('IncomingMessage')
|
||||
@@ -43,6 +39,7 @@ class DingTalkEvent(dict):
|
||||
def name(self):
|
||||
return self.get('Name', '')
|
||||
|
||||
|
||||
@property
|
||||
def conversation(self):
|
||||
return self.get('conversation_type', '')
|
||||
@@ -1,12 +1,12 @@
|
||||
# 微信公众号的加解密算法与企业微信一样,所以直接使用企业微信的加解密算法文件
|
||||
import time
|
||||
import traceback
|
||||
from langbot.libs.wecom_api.WXBizMsgCrypt3 import WXBizMsgCrypt
|
||||
from libs.wecom_api.WXBizMsgCrypt3 import WXBizMsgCrypt
|
||||
import xml.etree.ElementTree as ET
|
||||
from quart import Quart, request
|
||||
import hashlib
|
||||
from typing import Callable
|
||||
from langbot.libs.official_account_api.oaevent import OAEvent
|
||||
from .oaevent import OAEvent
|
||||
|
||||
import asyncio
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
from langbot.libs.wechatpad_api.util.http_util import post_json
|
||||
from libs.wechatpad_api.util.http_util import post_json
|
||||
|
||||
|
||||
class ChatRoomApi:
|
||||
@@ -1,4 +1,4 @@
|
||||
from langbot.libs.wechatpad_api.util.http_util import post_json
|
||||
from libs.wechatpad_api.util.http_util import post_json
|
||||
import httpx
|
||||
import base64
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
from langbot.libs.wechatpad_api.util.http_util import post_json, get_json
|
||||
from libs.wechatpad_api.util.http_util import post_json, get_json
|
||||
|
||||
|
||||
class LoginApi:
|
||||
@@ -1,4 +1,4 @@
|
||||
from langbot.libs.wechatpad_api.util.http_util import post_json
|
||||
from libs.wechatpad_api.util.http_util import post_json
|
||||
|
||||
|
||||
class MessageApi:
|
||||
@@ -1,4 +1,4 @@
|
||||
from langbot.libs.wechatpad_api.util.http_util import post_json, async_request, get_json
|
||||
from libs.wechatpad_api.util.http_util import post_json, async_request, get_json
|
||||
|
||||
|
||||
class UserApi:
|
||||
@@ -1,9 +1,9 @@
|
||||
from langbot.libs.wechatpad_api.api.login import LoginApi
|
||||
from langbot.libs.wechatpad_api.api.friend import FriendApi
|
||||
from langbot.libs.wechatpad_api.api.message import MessageApi
|
||||
from langbot.libs.wechatpad_api.api.user import UserApi
|
||||
from langbot.libs.wechatpad_api.api.downloadpai import DownloadApi
|
||||
from langbot.libs.wechatpad_api.api.chatroom import ChatRoomApi
|
||||
from libs.wechatpad_api.api.login import LoginApi
|
||||
from libs.wechatpad_api.api.friend import FriendApi
|
||||
from libs.wechatpad_api.api.message import MessageApi
|
||||
from libs.wechatpad_api.api.user import UserApi
|
||||
from libs.wechatpad_api.api.downloadpai import DownloadApi
|
||||
from libs.wechatpad_api.api.chatroom import ChatRoomApi
|
||||
|
||||
|
||||
class WeChatPadClient:
|
||||
@@ -16,7 +16,7 @@ import struct
|
||||
from Crypto.Cipher import AES
|
||||
import xml.etree.cElementTree as ET
|
||||
import socket
|
||||
from langbot.libs.wecom_ai_bot_api import ierror
|
||||
from libs.wecom_ai_bot_api import ierror
|
||||
|
||||
|
||||
"""
|
||||
290
libs/wecom_ai_bot_api/api.py
Normal file
@@ -0,0 +1,290 @@
|
||||
import json
|
||||
import time
|
||||
import uuid
|
||||
import xml.etree.ElementTree as ET
|
||||
from urllib.parse import unquote
|
||||
import hashlib
|
||||
import traceback
|
||||
|
||||
import httpx
|
||||
from libs.wecom_ai_bot_api.WXBizMsgCrypt3 import WXBizMsgCrypt
|
||||
from quart import Quart, request, Response, jsonify
|
||||
import langbot_plugin.api.entities.builtin.platform.message as platform_message
|
||||
import asyncio
|
||||
from libs.wecom_ai_bot_api import wecombotevent
|
||||
from typing import Callable
|
||||
import base64
|
||||
from Crypto.Cipher import AES
|
||||
from pkg.platform.logger import EventLogger
|
||||
|
||||
|
||||
|
||||
class WecomBotClient:
|
||||
def __init__(self,Token:str,EnCodingAESKey:str,Corpid:str,logger:EventLogger):
|
||||
self.Token=Token
|
||||
self.EnCodingAESKey=EnCodingAESKey
|
||||
self.Corpid=Corpid
|
||||
self.ReceiveId = ''
|
||||
self.app = Quart(__name__)
|
||||
self.app.add_url_rule(
|
||||
'/callback/command',
|
||||
'handle_callback',
|
||||
self.handle_callback_request,
|
||||
methods=['POST','GET']
|
||||
)
|
||||
self._message_handlers = {
|
||||
'example': [],
|
||||
}
|
||||
self.user_stream_map = {}
|
||||
self.logger = logger
|
||||
self.generated_content = {}
|
||||
self.msg_id_map = {}
|
||||
|
||||
async def sha1_signature(self, token: str, timestamp: str, nonce: str, encrypt: str) -> str:
|
||||
raw = "".join(sorted([token, timestamp, nonce, encrypt]))
|
||||
return hashlib.sha1(raw.encode("utf-8")).hexdigest()
|
||||
|
||||
async def handle_callback_request(self):
|
||||
try:
|
||||
self.wxcpt=WXBizMsgCrypt(self.Token,self.EnCodingAESKey,'')
|
||||
|
||||
if request.method == "GET":
|
||||
|
||||
msg_signature = unquote(request.args.get("msg_signature", ""))
|
||||
timestamp = unquote(request.args.get("timestamp", ""))
|
||||
nonce = unquote(request.args.get("nonce", ""))
|
||||
echostr = unquote(request.args.get("echostr", ""))
|
||||
|
||||
if not all([msg_signature, timestamp, nonce, echostr]):
|
||||
await self.logger.error("请求参数缺失")
|
||||
return Response("缺少参数", status=400)
|
||||
|
||||
ret, decrypted_str = self.wxcpt.VerifyURL(msg_signature, timestamp, nonce, echostr)
|
||||
if ret != 0:
|
||||
|
||||
await self.logger.error("验证URL失败")
|
||||
return Response("验证失败", status=403)
|
||||
|
||||
return Response(decrypted_str, mimetype="text/plain")
|
||||
|
||||
elif request.method == "POST":
|
||||
msg_signature = unquote(request.args.get("msg_signature", ""))
|
||||
timestamp = unquote(request.args.get("timestamp", ""))
|
||||
nonce = unquote(request.args.get("nonce", ""))
|
||||
|
||||
try:
|
||||
timeout = 3
|
||||
interval = 0.1
|
||||
start_time = time.monotonic()
|
||||
encrypted_json = await request.get_json()
|
||||
encrypted_msg = encrypted_json.get("encrypt", "")
|
||||
if not encrypted_msg:
|
||||
await self.logger.error("请求体中缺少 'encrypt' 字段")
|
||||
|
||||
xml_post_data = f"<xml><Encrypt><![CDATA[{encrypted_msg}]]></Encrypt></xml>"
|
||||
ret, decrypted_xml = self.wxcpt.DecryptMsg(xml_post_data, msg_signature, timestamp, nonce)
|
||||
if ret != 0:
|
||||
await self.logger.error("解密失败")
|
||||
|
||||
|
||||
msg_json = json.loads(decrypted_xml)
|
||||
|
||||
from_user_id = msg_json.get("from", {}).get("userid")
|
||||
chatid = msg_json.get("chatid", "")
|
||||
|
||||
message_data = await self.get_message(msg_json)
|
||||
|
||||
|
||||
|
||||
if message_data:
|
||||
try:
|
||||
event = wecombotevent.WecomBotEvent(message_data)
|
||||
if event:
|
||||
await self._handle_message(event)
|
||||
except Exception as e:
|
||||
await self.logger.error(traceback.format_exc())
|
||||
print(traceback.format_exc())
|
||||
|
||||
start_time = time.time()
|
||||
try:
|
||||
if msg_json.get('chattype','') == 'single':
|
||||
if from_user_id in self.user_stream_map:
|
||||
stream_id = self.user_stream_map[from_user_id]
|
||||
else:
|
||||
stream_id =str(uuid.uuid4())
|
||||
self.user_stream_map[from_user_id] = stream_id
|
||||
|
||||
|
||||
else:
|
||||
|
||||
if chatid in self.user_stream_map:
|
||||
stream_id = self.user_stream_map[chatid]
|
||||
else:
|
||||
stream_id = str(uuid.uuid4())
|
||||
self.user_stream_map[chatid] = stream_id
|
||||
except Exception as e:
|
||||
await self.logger.error(traceback.format_exc())
|
||||
print(traceback.format_exc())
|
||||
while True:
|
||||
content = self.generated_content.pop(msg_json['msgid'],None)
|
||||
if content:
|
||||
reply_plain = {
|
||||
"msgtype": "stream",
|
||||
"stream": {
|
||||
"id": stream_id,
|
||||
"finish": True,
|
||||
"content": content
|
||||
}
|
||||
}
|
||||
reply_plain_str = json.dumps(reply_plain, ensure_ascii=False)
|
||||
|
||||
reply_timestamp = str(int(time.time()))
|
||||
ret, encrypt_text = self.wxcpt.EncryptMsg(reply_plain_str, nonce, reply_timestamp)
|
||||
if ret != 0:
|
||||
|
||||
await self.logger.error("加密失败"+str(ret))
|
||||
|
||||
|
||||
root = ET.fromstring(encrypt_text)
|
||||
encrypt = root.find("Encrypt").text
|
||||
resp = {
|
||||
"encrypt": encrypt,
|
||||
}
|
||||
return jsonify(resp), 200
|
||||
|
||||
if time.time() - start_time > timeout:
|
||||
break
|
||||
|
||||
await asyncio.sleep(interval)
|
||||
|
||||
if self.msg_id_map.get(message_data['msgid'], 1) == 3:
|
||||
await self.logger.error('请求失效:暂不支持智能机器人超过7秒的请求,如有需求,请联系 LangBot 团队。')
|
||||
return ''
|
||||
|
||||
except Exception as e:
|
||||
await self.logger.error(traceback.format_exc())
|
||||
print(traceback.format_exc())
|
||||
|
||||
except Exception as e:
|
||||
await self.logger.error(traceback.format_exc())
|
||||
print(traceback.format_exc())
|
||||
|
||||
|
||||
async def get_message(self,msg_json):
|
||||
message_data = {}
|
||||
|
||||
if msg_json.get('chattype','') == 'single':
|
||||
message_data['type'] = 'single'
|
||||
elif msg_json.get('chattype','') == 'group':
|
||||
message_data['type'] = 'group'
|
||||
|
||||
if msg_json.get('msgtype') == 'text':
|
||||
message_data['content'] = msg_json.get('text',{}).get('content')
|
||||
elif msg_json.get('msgtype') == 'image':
|
||||
picurl = msg_json.get('image', {}).get('url','')
|
||||
base64 = await self.download_url_to_base64(picurl,self.EnCodingAESKey)
|
||||
message_data['picurl'] = base64
|
||||
elif msg_json.get('msgtype') == 'mixed':
|
||||
items = msg_json.get('mixed', {}).get('msg_item', [])
|
||||
texts = []
|
||||
picurl = None
|
||||
for item in items:
|
||||
if item.get('msgtype') == 'text':
|
||||
texts.append(item.get('text', {}).get('content', ''))
|
||||
elif item.get('msgtype') == 'image' and picurl is None:
|
||||
picurl = item.get('image', {}).get('url')
|
||||
|
||||
if texts:
|
||||
message_data['content'] = "".join(texts) # 拼接所有 text
|
||||
if picurl:
|
||||
base64 = await self.download_url_to_base64(picurl,self.EnCodingAESKey)
|
||||
message_data['picurl'] = base64 # 只保留第一个 image
|
||||
|
||||
message_data['userid'] = msg_json.get('from', {}).get('userid', '')
|
||||
message_data['msgid'] = msg_json.get('msgid', '')
|
||||
|
||||
if msg_json.get('aibotid'):
|
||||
message_data['aibotid'] = msg_json.get('aibotid', '')
|
||||
|
||||
return message_data
|
||||
|
||||
async def _handle_message(self, event: wecombotevent.WecomBotEvent):
|
||||
"""
|
||||
处理消息事件。
|
||||
"""
|
||||
try:
|
||||
message_id = event.message_id
|
||||
if message_id in self.msg_id_map.keys():
|
||||
self.msg_id_map[message_id] += 1
|
||||
return
|
||||
self.msg_id_map[message_id] = 1
|
||||
msg_type = event.type
|
||||
if msg_type in self._message_handlers:
|
||||
for handler in self._message_handlers[msg_type]:
|
||||
await handler(event)
|
||||
except Exception:
|
||||
print(traceback.format_exc())
|
||||
|
||||
async def set_message(self, msg_id: str, content: str):
|
||||
self.generated_content[msg_id] = content
|
||||
|
||||
def on_message(self, msg_type: str):
|
||||
def decorator(func: Callable[[wecombotevent.WecomBotEvent], None]):
|
||||
if msg_type not in self._message_handlers:
|
||||
self._message_handlers[msg_type] = []
|
||||
self._message_handlers[msg_type].append(func)
|
||||
return func
|
||||
|
||||
return decorator
|
||||
|
||||
|
||||
async def download_url_to_base64(self, download_url, encoding_aes_key):
|
||||
async with httpx.AsyncClient() as client:
|
||||
response = await client.get(download_url)
|
||||
if response.status_code != 200:
|
||||
await self.logger.error(f'failed to get file: {response.text}')
|
||||
return None
|
||||
|
||||
encrypted_bytes = response.content
|
||||
|
||||
|
||||
aes_key = base64.b64decode(encoding_aes_key + "=") # base64 补齐
|
||||
iv = aes_key[:16]
|
||||
|
||||
|
||||
cipher = AES.new(aes_key, AES.MODE_CBC, iv)
|
||||
decrypted = cipher.decrypt(encrypted_bytes)
|
||||
|
||||
|
||||
pad_len = decrypted[-1]
|
||||
decrypted = decrypted[:-pad_len]
|
||||
|
||||
|
||||
if decrypted.startswith(b"\xff\xd8"): # JPEG
|
||||
mime_type = "image/jpeg"
|
||||
elif decrypted.startswith(b"\x89PNG"): # PNG
|
||||
mime_type = "image/png"
|
||||
elif decrypted.startswith((b"GIF87a", b"GIF89a")): # GIF
|
||||
mime_type = "image/gif"
|
||||
elif decrypted.startswith(b"BM"): # BMP
|
||||
mime_type = "image/bmp"
|
||||
elif decrypted.startswith(b"II*\x00") or decrypted.startswith(b"MM\x00*"): # TIFF
|
||||
mime_type = "image/tiff"
|
||||
else:
|
||||
mime_type = "application/octet-stream"
|
||||
|
||||
# 转 base64
|
||||
base64_str = base64.b64encode(decrypted).decode("utf-8")
|
||||
return f"data:{mime_type};base64,{base64_str}"
|
||||
|
||||
|
||||
async def run_task(self, host: str, port: int, *args, **kwargs):
|
||||
"""
|
||||
启动 Quart 应用。
|
||||
"""
|
||||
await self.app.run_task(host=host, port=port, *args, **kwargs)
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -22,26 +22,7 @@ class WecomBotEvent(dict):
|
||||
"""
|
||||
用户id
|
||||
"""
|
||||
return self.get('from', {}).get('userid', '') or self.get('userid', '')
|
||||
|
||||
@property
|
||||
def username(self) -> str:
|
||||
"""
|
||||
用户名称
|
||||
"""
|
||||
return (
|
||||
self.get('username', '')
|
||||
or self.get('from', {}).get('alias', '')
|
||||
or self.get('from', {}).get('name', '')
|
||||
or self.userid
|
||||
)
|
||||
|
||||
@property
|
||||
def chatname(self) -> str:
|
||||
"""
|
||||
群组名称
|
||||
"""
|
||||
return self.get('chatname', '') or str(self.chatid)
|
||||
return self.get('from', {}).get('userid', '')
|
||||
|
||||
@property
|
||||
def content(self) -> str:
|
||||
@@ -109,13 +109,14 @@ class WecomClient:
|
||||
async def send_image(self, user_id: str, agent_id: int, media_id: str):
|
||||
if not await self.check_access_token():
|
||||
self.access_token = await self.get_access_token(self.secret)
|
||||
|
||||
url = self.base_url + '/message/send?access_token=' + self.access_token
|
||||
url = self.base_url + '/media/upload?access_token=' + self.access_token
|
||||
async with httpx.AsyncClient() as client:
|
||||
params = {
|
||||
'touser': user_id,
|
||||
'msgtype': 'image',
|
||||
'toparty': '',
|
||||
'totag': '',
|
||||
'agentid': agent_id,
|
||||
'msgtype': 'image',
|
||||
'image': {
|
||||
'media_id': media_id,
|
||||
},
|
||||
@@ -124,13 +125,19 @@ class WecomClient:
|
||||
'enable_duplicate_check': 0,
|
||||
'duplicate_check_interval': 1800,
|
||||
}
|
||||
response = await client.post(url, json=params)
|
||||
data = response.json()
|
||||
try:
|
||||
response = await client.post(url, json=params)
|
||||
data = response.json()
|
||||
except Exception as e:
|
||||
await self.logger.error(f'发送图片失败:{data}')
|
||||
raise Exception('Failed to send image: ' + str(e))
|
||||
|
||||
# 企业微信错误码40014和42001,代表accesstoken问题
|
||||
if data['errcode'] == 40014 or data['errcode'] == 42001:
|
||||
self.access_token = await self.get_access_token(self.secret)
|
||||
return await self.send_image(user_id, agent_id, media_id)
|
||||
|
||||
if data['errcode'] != 0:
|
||||
await self.logger.error(f'发送图片失败:{data}')
|
||||
raise Exception('Failed to send image: ' + str(data))
|
||||
|
||||
async def send_private_msg(self, user_id: str, agent_id: int, content: str):
|
||||
@@ -333,3 +340,4 @@ class WecomClient:
|
||||
async def get_media_id(self, image: platform_message.Image):
|
||||
media_id = await self.upload_to_work(image=image)
|
||||
return media_id
|
||||
|
||||
118
main.py
@@ -1,3 +1,117 @@
|
||||
import langbot.__main__
|
||||
import asyncio
|
||||
import argparse
|
||||
# LangBot 终端启动入口
|
||||
# 在此层级解决依赖项检查。
|
||||
# LangBot/main.py
|
||||
|
||||
langbot.__main__.main()
|
||||
asciiart = r"""
|
||||
_ ___ _
|
||||
| | __ _ _ _ __ _| _ ) ___| |_
|
||||
| |__/ _` | ' \/ _` | _ \/ _ \ _|
|
||||
|____\__,_|_||_\__, |___/\___/\__|
|
||||
|___/
|
||||
|
||||
⭐️ Open Source 开源地址: https://github.com/langbot-app/LangBot
|
||||
📖 Documentation 文档地址: https://docs.langbot.app
|
||||
"""
|
||||
|
||||
|
||||
async def main_entry(loop: asyncio.AbstractEventLoop):
|
||||
parser = argparse.ArgumentParser(description='LangBot')
|
||||
parser.add_argument(
|
||||
'--standalone-runtime',
|
||||
action='store_true',
|
||||
help='Use standalone plugin runtime / 使用独立插件运行时',
|
||||
default=False,
|
||||
)
|
||||
parser.add_argument('--debug', action='store_true', help='Debug mode / 调试模式', default=False)
|
||||
args = parser.parse_args()
|
||||
|
||||
if args.standalone_runtime:
|
||||
from pkg.utils import platform
|
||||
|
||||
platform.standalone_runtime = True
|
||||
|
||||
if args.debug:
|
||||
from pkg.utils import constants
|
||||
|
||||
constants.debug_mode = True
|
||||
|
||||
print(asciiart)
|
||||
|
||||
import sys
|
||||
|
||||
# 检查依赖
|
||||
|
||||
from pkg.core.bootutils import deps
|
||||
|
||||
missing_deps = await deps.check_deps()
|
||||
|
||||
if missing_deps:
|
||||
print('以下依赖包未安装,将自动安装,请完成后重启程序:')
|
||||
print(
|
||||
'These dependencies are missing, they will be installed automatically, please restart the program after completion:'
|
||||
)
|
||||
for dep in missing_deps:
|
||||
print('-', dep)
|
||||
await deps.install_deps(missing_deps)
|
||||
print('已自动安装缺失的依赖包,请重启程序。')
|
||||
print('The missing dependencies have been installed automatically, please restart the program.')
|
||||
sys.exit(0)
|
||||
|
||||
# # 检查pydantic版本,如果没有 pydantic.v1,则把 pydantic 映射为 v1
|
||||
# import pydantic.version
|
||||
|
||||
# if pydantic.version.VERSION < '2.0':
|
||||
# import pydantic
|
||||
|
||||
# sys.modules['pydantic.v1'] = pydantic
|
||||
|
||||
# 检查配置文件
|
||||
|
||||
from pkg.core.bootutils import files
|
||||
|
||||
generated_files = await files.generate_files()
|
||||
|
||||
if generated_files:
|
||||
print('以下文件不存在,已自动生成:')
|
||||
print('Following files do not exist and have been automatically generated:')
|
||||
for file in generated_files:
|
||||
print('-', file)
|
||||
|
||||
from pkg.core import boot
|
||||
|
||||
await boot.main(loop)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
import os
|
||||
import sys
|
||||
|
||||
# 必须大于 3.10.1
|
||||
if sys.version_info < (3, 10, 1):
|
||||
print('需要 Python 3.10.1 及以上版本,当前 Python 版本为:', sys.version)
|
||||
input('按任意键退出...')
|
||||
print('Your Python version is not supported. Please exit the program by pressing any key.')
|
||||
exit(1)
|
||||
|
||||
# Check if the current directory is the LangBot project root directory
|
||||
invalid_pwd = False
|
||||
|
||||
if not os.path.exists('main.py'):
|
||||
invalid_pwd = True
|
||||
else:
|
||||
with open('main.py', 'r', encoding='utf-8') as f:
|
||||
content = f.read()
|
||||
if 'LangBot/main.py' not in content:
|
||||
invalid_pwd = True
|
||||
if invalid_pwd:
|
||||
print('请在 LangBot 项目根目录下以命令形式运行此程序。')
|
||||
input('按任意键退出...')
|
||||
print('Please run this program in the LangBot project root directory in command form.')
|
||||
print('Press any key to exit...')
|
||||
exit(1)
|
||||
|
||||
loop = asyncio.new_event_loop()
|
||||
|
||||
loop.run_until_complete(main_entry(loop))
|
||||
|
||||
@@ -9,9 +9,6 @@ from quart.typing import RouteCallable
|
||||
|
||||
from ....core import app
|
||||
|
||||
# Maximum file upload size limit (10MB)
|
||||
MAX_FILE_SIZE = 10 * 1024 * 1024 # 10MB
|
||||
|
||||
|
||||
preregistered_groups: list[type[RouterGroup]] = []
|
||||
"""Pre-registered list of RouterGroup"""
|
||||
@@ -34,8 +31,6 @@ class AuthType(enum.Enum):
|
||||
|
||||
NONE = 'none'
|
||||
USER_TOKEN = 'user-token'
|
||||
API_KEY = 'api-key'
|
||||
USER_TOKEN_OR_API_KEY = 'user-token-or-api-key'
|
||||
|
||||
|
||||
class RouterGroup(abc.ABC):
|
||||
@@ -89,65 +84,6 @@ class RouterGroup(abc.ABC):
|
||||
except Exception as e:
|
||||
return self.http_status(401, -1, str(e))
|
||||
|
||||
elif auth_type == AuthType.API_KEY:
|
||||
# get API key from Authorization header or X-API-Key header
|
||||
api_key = quart.request.headers.get('X-API-Key', '')
|
||||
if not api_key:
|
||||
auth_header = quart.request.headers.get('Authorization', '')
|
||||
if auth_header.startswith('Bearer '):
|
||||
api_key = auth_header.replace('Bearer ', '')
|
||||
|
||||
if not api_key:
|
||||
return self.http_status(401, -1, 'No valid API key provided')
|
||||
|
||||
try:
|
||||
is_valid = await self.ap.apikey_service.verify_api_key(api_key)
|
||||
if not is_valid:
|
||||
return self.http_status(401, -1, 'Invalid API key')
|
||||
except Exception as e:
|
||||
return self.http_status(401, -1, str(e))
|
||||
|
||||
elif auth_type == AuthType.USER_TOKEN_OR_API_KEY:
|
||||
# Try API key first (check X-API-Key header)
|
||||
api_key = quart.request.headers.get('X-API-Key', '')
|
||||
|
||||
if api_key:
|
||||
# API key authentication
|
||||
try:
|
||||
is_valid = await self.ap.apikey_service.verify_api_key(api_key)
|
||||
if not is_valid:
|
||||
return self.http_status(401, -1, 'Invalid API key')
|
||||
except Exception as e:
|
||||
return self.http_status(401, -1, str(e))
|
||||
else:
|
||||
# Try user token authentication (Authorization header)
|
||||
token = quart.request.headers.get('Authorization', '').replace('Bearer ', '')
|
||||
|
||||
if not token:
|
||||
return self.http_status(
|
||||
401, -1, 'No valid authentication provided (user token or API key required)'
|
||||
)
|
||||
|
||||
try:
|
||||
user_email = await self.ap.user_service.verify_jwt_token(token)
|
||||
|
||||
# check if this account exists
|
||||
user = await self.ap.user_service.get_user_by_email(user_email)
|
||||
if not user:
|
||||
return self.http_status(401, -1, 'User not found')
|
||||
|
||||
# check if f accepts user_email parameter
|
||||
if 'user_email' in f.__code__.co_varnames:
|
||||
kwargs['user_email'] = user_email
|
||||
except Exception:
|
||||
# If user token fails, maybe it's an API key in Authorization header
|
||||
try:
|
||||
is_valid = await self.ap.apikey_service.verify_api_key(token)
|
||||
if not is_valid:
|
||||
return self.http_status(401, -1, 'Invalid authentication credentials')
|
||||
except Exception as e:
|
||||
return self.http_status(401, -1, str(e))
|
||||
|
||||
try:
|
||||
return await f(*args, **kwargs)
|
||||
|
||||
@@ -31,41 +31,19 @@ class FilesRouterGroup(group.RouterGroup):
|
||||
@self.route('/documents', methods=['POST'], auth_type=group.AuthType.USER_TOKEN)
|
||||
async def _() -> quart.Response:
|
||||
request = quart.request
|
||||
|
||||
# Check file size limit before reading the file
|
||||
content_length = request.content_length
|
||||
if content_length and content_length > group.MAX_FILE_SIZE:
|
||||
return self.fail(400, 'File size exceeds 10MB limit. Please split large files into smaller parts.')
|
||||
|
||||
# get file bytes from 'file'
|
||||
files = await request.files
|
||||
if 'file' not in files:
|
||||
return self.fail(400, 'No file provided in request')
|
||||
|
||||
file = files['file']
|
||||
file = (await request.files)['file']
|
||||
assert isinstance(file, quart.datastructures.FileStorage)
|
||||
|
||||
file_bytes = await asyncio.to_thread(file.stream.read)
|
||||
|
||||
# Double-check actual file size after reading
|
||||
if len(file_bytes) > group.MAX_FILE_SIZE:
|
||||
return self.fail(400, 'File size exceeds 10MB limit. Please split large files into smaller parts.')
|
||||
|
||||
# Split filename and extension properly
|
||||
if '.' in file.filename:
|
||||
file_name, extension = file.filename.rsplit('.', 1)
|
||||
else:
|
||||
file_name = file.filename
|
||||
extension = ''
|
||||
extension = file.filename.split('.')[-1]
|
||||
file_name = file.filename.split('.')[0]
|
||||
|
||||
# check if file name contains '/' or '\'
|
||||
if '/' in file_name or '\\' in file_name:
|
||||
return self.fail(400, 'File name contains invalid characters')
|
||||
|
||||
file_key = file_name + '_' + str(uuid.uuid4())[:8]
|
||||
if extension:
|
||||
file_key += '.' + extension
|
||||
|
||||
file_key = file_name + '_' + str(uuid.uuid4())[:8] + '.' + extension
|
||||
# save file to storage
|
||||
await self.ap.storage_mgr.storage_provider.save(file_key, file_bytes)
|
||||
return self.success(
|
||||
48
pkg/api/http/controller/groups/pipelines/pipelines.py
Normal file
@@ -0,0 +1,48 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import quart
|
||||
|
||||
from ... import group
|
||||
|
||||
|
||||
@group.group_class('pipelines', '/api/v1/pipelines')
|
||||
class PipelinesRouterGroup(group.RouterGroup):
|
||||
async def initialize(self) -> None:
|
||||
@self.route('', methods=['GET', 'POST'])
|
||||
async def _() -> str:
|
||||
if quart.request.method == 'GET':
|
||||
sort_by = quart.request.args.get('sort_by', 'created_at')
|
||||
sort_order = quart.request.args.get('sort_order', 'DESC')
|
||||
return self.success(
|
||||
data={'pipelines': await self.ap.pipeline_service.get_pipelines(sort_by, sort_order)}
|
||||
)
|
||||
elif quart.request.method == 'POST':
|
||||
json_data = await quart.request.json
|
||||
|
||||
pipeline_uuid = await self.ap.pipeline_service.create_pipeline(json_data)
|
||||
|
||||
return self.success(data={'uuid': pipeline_uuid})
|
||||
|
||||
@self.route('/_/metadata', methods=['GET'])
|
||||
async def _() -> str:
|
||||
return self.success(data={'configs': await self.ap.pipeline_service.get_pipeline_metadata()})
|
||||
|
||||
@self.route('/<pipeline_uuid>', methods=['GET', 'PUT', 'DELETE'])
|
||||
async def _(pipeline_uuid: str) -> str:
|
||||
if quart.request.method == 'GET':
|
||||
pipeline = await self.ap.pipeline_service.get_pipeline(pipeline_uuid)
|
||||
|
||||
if pipeline is None:
|
||||
return self.http_status(404, -1, 'pipeline not found')
|
||||
|
||||
return self.success(data={'pipeline': pipeline})
|
||||
elif quart.request.method == 'PUT':
|
||||
json_data = await quart.request.json
|
||||
|
||||
await self.ap.pipeline_service.update_pipeline(pipeline_uuid, json_data)
|
||||
|
||||
return self.success()
|
||||
elif quart.request.method == 'DELETE':
|
||||
await self.ap.pipeline_service.delete_pipeline(pipeline_uuid)
|
||||
|
||||
return self.success()
|
||||
229
pkg/api/http/controller/groups/pipelines/websocket.py
Normal file
@@ -0,0 +1,229 @@
|
||||
"""流水线调试 WebSocket 路由
|
||||
|
||||
提供基于 WebSocket 的实时双向通信,用于流水线调试。
|
||||
支持 person 和 group 两种会话类型的隔离。
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
|
||||
import quart
|
||||
|
||||
from ... import group
|
||||
from ....service.websocket_pool import WebSocketConnection
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def handle_client_event(connection: WebSocketConnection, message: dict, ap):
|
||||
"""处理客户端发送的事件
|
||||
|
||||
Args:
|
||||
connection: WebSocket 连接对象
|
||||
message: 客户端消息 {'type': 'xxx', 'data': {...}}
|
||||
ap: Application 实例
|
||||
"""
|
||||
event_type = message.get('type')
|
||||
data = message.get('data', {})
|
||||
|
||||
pipeline_uuid = connection.pipeline_uuid
|
||||
session_type = connection.session_type
|
||||
|
||||
try:
|
||||
webchat_adapter = ap.platform_mgr.webchat_proxy_bot.adapter
|
||||
|
||||
if event_type == 'send_message':
|
||||
# 发送消息到指定会话
|
||||
message_chain_obj = data.get('message_chain', [])
|
||||
client_message_id = data.get('client_message_id')
|
||||
|
||||
if not message_chain_obj:
|
||||
await connection.send('error', {'error': 'message_chain is required', 'error_code': 'INVALID_REQUEST'})
|
||||
return
|
||||
|
||||
logger.info(
|
||||
f"Received send_message: pipeline={pipeline_uuid}, "
|
||||
f"session={session_type}, "
|
||||
f"client_msg_id={client_message_id}"
|
||||
)
|
||||
|
||||
# 调用 webchat_adapter.send_webchat_message
|
||||
# 消息将通过 reply_message_chunk 自动推送到 WebSocket
|
||||
result = None
|
||||
async for msg in webchat_adapter.send_webchat_message(
|
||||
pipeline_uuid=pipeline_uuid, session_type=session_type, message_chain_obj=message_chain_obj, is_stream=True
|
||||
):
|
||||
result = msg
|
||||
|
||||
# 发送确认
|
||||
if result:
|
||||
await connection.send(
|
||||
'message_sent',
|
||||
{
|
||||
'client_message_id': client_message_id,
|
||||
'server_message_id': result.get('id'),
|
||||
'timestamp': result.get('timestamp'),
|
||||
},
|
||||
)
|
||||
|
||||
elif event_type == 'load_history':
|
||||
# 加载指定会话的历史消息
|
||||
before_message_id = data.get('before_message_id')
|
||||
limit = data.get('limit', 50)
|
||||
|
||||
logger.info(f"Loading history: pipeline={pipeline_uuid}, session={session_type}, limit={limit}")
|
||||
|
||||
# 从对应会话获取历史消息
|
||||
messages = webchat_adapter.get_webchat_messages(pipeline_uuid, session_type)
|
||||
|
||||
# 简单分页:返回最后 limit 条
|
||||
if before_message_id:
|
||||
# TODO: 实现基于 message_id 的分页
|
||||
history_messages = messages[-limit:]
|
||||
else:
|
||||
history_messages = messages[-limit:] if len(messages) > limit else messages
|
||||
|
||||
await connection.send(
|
||||
'history', {'messages': history_messages, 'has_more': len(messages) > len(history_messages)}
|
||||
)
|
||||
|
||||
elif event_type == 'interrupt':
|
||||
# 中断消息
|
||||
message_id = data.get('message_id')
|
||||
logger.info(f"Interrupt requested: message_id={message_id}")
|
||||
|
||||
# TODO: 实现中断逻辑
|
||||
await connection.send('interrupted', {'message_id': message_id, 'partial_content': ''})
|
||||
|
||||
elif event_type == 'ping':
|
||||
# 心跳
|
||||
connection.last_ping = datetime.now()
|
||||
await connection.send('pong', {'timestamp': data.get('timestamp')})
|
||||
|
||||
else:
|
||||
logger.warning(f"Unknown event type: {event_type}")
|
||||
await connection.send('error', {'error': f'Unknown event type: {event_type}', 'error_code': 'UNKNOWN_EVENT'})
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error handling event {event_type}: {e}", exc_info=True)
|
||||
await connection.send(
|
||||
'error',
|
||||
{'error': f'Internal server error: {str(e)}', 'error_code': 'INTERNAL_ERROR', 'details': {'event_type': event_type}},
|
||||
)
|
||||
|
||||
|
||||
@group.group_class('pipeline-websocket', '/api/v1/pipelines/<pipeline_uuid>/chat')
|
||||
class PipelineWebSocketRouterGroup(group.RouterGroup):
|
||||
"""流水线调试 WebSocket 路由组"""
|
||||
|
||||
async def initialize(self) -> None:
|
||||
@self.route('/ws')
|
||||
async def websocket_handler(pipeline_uuid: str):
|
||||
"""WebSocket 连接处理 - 会话隔离
|
||||
|
||||
连接流程:
|
||||
1. 客户端建立 WebSocket 连接
|
||||
2. 客户端发送 connect 事件(携带 session_type 和 token)
|
||||
3. 服务端验证并创建连接对象
|
||||
4. 进入消息循环,处理客户端事件
|
||||
5. 断开时清理连接
|
||||
|
||||
Args:
|
||||
pipeline_uuid: 流水线 UUID
|
||||
"""
|
||||
websocket = quart.websocket._get_current_object()
|
||||
connection_id = str(uuid.uuid4())
|
||||
session_key = None
|
||||
connection = None
|
||||
|
||||
try:
|
||||
# 1. 等待客户端发送 connect 事件
|
||||
first_message = await websocket.receive_json()
|
||||
|
||||
if first_message.get('type') != 'connect':
|
||||
await websocket.send_json(
|
||||
{'type': 'error', 'data': {'error': 'First message must be connect event', 'error_code': 'INVALID_HANDSHAKE'}}
|
||||
)
|
||||
await websocket.close(1008)
|
||||
return
|
||||
|
||||
connect_data = first_message.get('data', {})
|
||||
session_type = connect_data.get('session_type')
|
||||
token = connect_data.get('token')
|
||||
|
||||
# 验证参数
|
||||
if session_type not in ['person', 'group']:
|
||||
await websocket.send_json(
|
||||
{'type': 'error', 'data': {'error': 'session_type must be person or group', 'error_code': 'INVALID_SESSION_TYPE'}}
|
||||
)
|
||||
await websocket.close(1008)
|
||||
return
|
||||
|
||||
# 验证 token
|
||||
if not token:
|
||||
await websocket.send_json(
|
||||
{'type': 'error', 'data': {'error': 'token is required', 'error_code': 'MISSING_TOKEN'}}
|
||||
)
|
||||
await websocket.close(1008)
|
||||
return
|
||||
|
||||
# 验证用户身份
|
||||
try:
|
||||
user = await self.ap.user_service.verify_token(token)
|
||||
if not user:
|
||||
await websocket.send_json({'type': 'error', 'data': {'error': 'Unauthorized', 'error_code': 'UNAUTHORIZED'}})
|
||||
await websocket.close(1008)
|
||||
return
|
||||
except Exception as e:
|
||||
logger.error(f"Token verification failed: {e}")
|
||||
await websocket.send_json(
|
||||
{'type': 'error', 'data': {'error': 'Token verification failed', 'error_code': 'AUTH_ERROR'}}
|
||||
)
|
||||
await websocket.close(1008)
|
||||
return
|
||||
|
||||
# 2. 创建连接对象并加入连接池
|
||||
connection = WebSocketConnection(
|
||||
connection_id=connection_id,
|
||||
websocket=websocket,
|
||||
pipeline_uuid=pipeline_uuid,
|
||||
session_type=session_type,
|
||||
created_at=datetime.now(),
|
||||
last_ping=datetime.now(),
|
||||
)
|
||||
|
||||
session_key = connection.session_key
|
||||
ws_pool = self.ap.ws_pool
|
||||
ws_pool.add_connection(connection)
|
||||
|
||||
# 3. 发送连接成功事件
|
||||
await connection.send(
|
||||
'connected', {'connection_id': connection_id, 'session_type': session_type, 'pipeline_uuid': pipeline_uuid}
|
||||
)
|
||||
|
||||
logger.info(f"WebSocket connected: {connection_id} [pipeline={pipeline_uuid}, session={session_type}]")
|
||||
|
||||
# 4. 进入消息处理循环
|
||||
while True:
|
||||
try:
|
||||
message = await websocket.receive_json()
|
||||
await handle_client_event(connection, message, self.ap)
|
||||
except asyncio.CancelledError:
|
||||
logger.info(f"WebSocket connection cancelled: {connection_id}")
|
||||
break
|
||||
except Exception as e:
|
||||
logger.error(f"Error receiving message from {connection_id}: {e}")
|
||||
break
|
||||
|
||||
except quart.exceptions.WebsocketDisconnected:
|
||||
logger.info(f"WebSocket disconnected: {connection_id}")
|
||||
except Exception as e:
|
||||
logger.error(f"WebSocket error for {connection_id}: {e}", exc_info=True)
|
||||
finally:
|
||||
# 清理连接
|
||||
if connection and session_key:
|
||||
ws_pool = self.ap.ws_pool
|
||||
await ws_pool.remove_connection(connection_id, session_key)
|
||||
logger.info(f"WebSocket connection cleaned up: {connection_id}")
|
||||
@@ -1,7 +1,6 @@
|
||||
import quart
|
||||
import mimetypes
|
||||
|
||||
from ... import group
|
||||
from langbot.pkg.utils import importutil
|
||||
|
||||
|
||||
@group.group_class('adapters', '/api/v1/platform/adapters')
|
||||
@@ -32,6 +31,4 @@ class AdaptersRouterGroup(group.RouterGroup):
|
||||
if icon_path is None:
|
||||
return self.http_status(404, -1, 'icon not found')
|
||||
|
||||
return quart.Response(
|
||||
importutil.read_resource_file_bytes(icon_path), mimetype=mimetypes.guess_type(icon_path)[0]
|
||||
)
|
||||
return await quart.send_file(icon_path)
|
||||
@@ -6,7 +6,7 @@ from ... import group
|
||||
@group.group_class('bots', '/api/v1/platform/bots')
|
||||
class BotsRouterGroup(group.RouterGroup):
|
||||
async def initialize(self) -> None:
|
||||
@self.route('', methods=['GET', 'POST'], auth_type=group.AuthType.USER_TOKEN_OR_API_KEY)
|
||||
@self.route('', methods=['GET', 'POST'])
|
||||
async def _() -> str:
|
||||
if quart.request.method == 'GET':
|
||||
return self.success(data={'bots': await self.ap.bot_service.get_bots()})
|
||||
@@ -15,7 +15,7 @@ class BotsRouterGroup(group.RouterGroup):
|
||||
bot_uuid = await self.ap.bot_service.create_bot(json_data)
|
||||
return self.success(data={'uuid': bot_uuid})
|
||||
|
||||
@self.route('/<bot_uuid>', methods=['GET', 'PUT', 'DELETE'], auth_type=group.AuthType.USER_TOKEN_OR_API_KEY)
|
||||
@self.route('/<bot_uuid>', methods=['GET', 'PUT', 'DELETE'])
|
||||
async def _(bot_uuid: str) -> str:
|
||||
if quart.request.method == 'GET':
|
||||
bot = await self.ap.bot_service.get_bot(bot_uuid)
|
||||
@@ -30,7 +30,7 @@ class BotsRouterGroup(group.RouterGroup):
|
||||
await self.ap.bot_service.delete_bot(bot_uuid)
|
||||
return self.success()
|
||||
|
||||
@self.route('/<bot_uuid>/logs', methods=['POST'], auth_type=group.AuthType.USER_TOKEN_OR_API_KEY)
|
||||
@self.route('/<bot_uuid>/logs', methods=['POST'])
|
||||
async def _(bot_uuid: str) -> str:
|
||||
json_data = await quart.request.json
|
||||
from_index = json_data.get('from_index', -1)
|
||||
144
pkg/api/http/controller/groups/plugins.py
Normal file
@@ -0,0 +1,144 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import base64
|
||||
import quart
|
||||
|
||||
from .....core import taskmgr
|
||||
from .. import group
|
||||
from langbot_plugin.runtime.plugin.mgr import PluginInstallSource
|
||||
|
||||
|
||||
@group.group_class('plugins', '/api/v1/plugins')
|
||||
class PluginsRouterGroup(group.RouterGroup):
|
||||
async def initialize(self) -> None:
|
||||
@self.route('', methods=['GET'], auth_type=group.AuthType.USER_TOKEN)
|
||||
async def _() -> str:
|
||||
plugins = await self.ap.plugin_connector.list_plugins()
|
||||
|
||||
return self.success(data={'plugins': plugins})
|
||||
|
||||
@self.route(
|
||||
'/<author>/<plugin_name>/upgrade',
|
||||
methods=['POST'],
|
||||
auth_type=group.AuthType.USER_TOKEN,
|
||||
)
|
||||
async def _(author: str, plugin_name: str) -> str:
|
||||
ctx = taskmgr.TaskContext.new()
|
||||
wrapper = self.ap.task_mgr.create_user_task(
|
||||
self.ap.plugin_connector.upgrade_plugin(author, plugin_name, task_context=ctx),
|
||||
kind='plugin-operation',
|
||||
name=f'plugin-upgrade-{plugin_name}',
|
||||
label=f'Upgrading plugin {plugin_name}',
|
||||
context=ctx,
|
||||
)
|
||||
return self.success(data={'task_id': wrapper.id})
|
||||
|
||||
@self.route(
|
||||
'/<author>/<plugin_name>',
|
||||
methods=['GET', 'DELETE'],
|
||||
auth_type=group.AuthType.USER_TOKEN,
|
||||
)
|
||||
async def _(author: str, plugin_name: str) -> str:
|
||||
if quart.request.method == 'GET':
|
||||
plugin = await self.ap.plugin_connector.get_plugin_info(author, plugin_name)
|
||||
if plugin is None:
|
||||
return self.http_status(404, -1, 'plugin not found')
|
||||
return self.success(data={'plugin': plugin})
|
||||
elif quart.request.method == 'DELETE':
|
||||
ctx = taskmgr.TaskContext.new()
|
||||
wrapper = self.ap.task_mgr.create_user_task(
|
||||
self.ap.plugin_connector.delete_plugin(author, plugin_name, task_context=ctx),
|
||||
kind='plugin-operation',
|
||||
name=f'plugin-remove-{plugin_name}',
|
||||
label=f'Removing plugin {plugin_name}',
|
||||
context=ctx,
|
||||
)
|
||||
|
||||
return self.success(data={'task_id': wrapper.id})
|
||||
|
||||
@self.route(
|
||||
'/<author>/<plugin_name>/config',
|
||||
methods=['GET', 'PUT'],
|
||||
auth_type=group.AuthType.USER_TOKEN,
|
||||
)
|
||||
async def _(author: str, plugin_name: str) -> quart.Response:
|
||||
plugin = await self.ap.plugin_connector.get_plugin_info(author, plugin_name)
|
||||
if plugin is None:
|
||||
return self.http_status(404, -1, 'plugin not found')
|
||||
|
||||
if quart.request.method == 'GET':
|
||||
return self.success(data={'config': plugin['plugin_config']})
|
||||
elif quart.request.method == 'PUT':
|
||||
data = await quart.request.json
|
||||
|
||||
await self.ap.plugin_connector.set_plugin_config(author, plugin_name, data)
|
||||
|
||||
return self.success(data={})
|
||||
|
||||
@self.route(
|
||||
'/<author>/<plugin_name>/icon',
|
||||
methods=['GET'],
|
||||
auth_type=group.AuthType.NONE,
|
||||
)
|
||||
async def _(author: str, plugin_name: str) -> quart.Response:
|
||||
icon_data = await self.ap.plugin_connector.get_plugin_icon(author, plugin_name)
|
||||
icon_base64 = icon_data['plugin_icon_base64']
|
||||
mime_type = icon_data['mime_type']
|
||||
|
||||
icon_data = base64.b64decode(icon_base64)
|
||||
|
||||
return quart.Response(icon_data, mimetype=mime_type)
|
||||
|
||||
@self.route('/install/github', methods=['POST'], auth_type=group.AuthType.USER_TOKEN)
|
||||
async def _() -> str:
|
||||
data = await quart.request.json
|
||||
|
||||
ctx = taskmgr.TaskContext.new()
|
||||
short_source_str = data['source'][-8:]
|
||||
wrapper = self.ap.task_mgr.create_user_task(
|
||||
self.ap.plugin_mgr.install_plugin(data['source'], task_context=ctx),
|
||||
kind='plugin-operation',
|
||||
name='plugin-install-github',
|
||||
label=f'Installing plugin from github ...{short_source_str}',
|
||||
context=ctx,
|
||||
)
|
||||
|
||||
return self.success(data={'task_id': wrapper.id})
|
||||
|
||||
@self.route('/install/marketplace', methods=['POST'], auth_type=group.AuthType.USER_TOKEN)
|
||||
async def _() -> str:
|
||||
data = await quart.request.json
|
||||
|
||||
ctx = taskmgr.TaskContext.new()
|
||||
wrapper = self.ap.task_mgr.create_user_task(
|
||||
self.ap.plugin_connector.install_plugin(PluginInstallSource.MARKETPLACE, data, task_context=ctx),
|
||||
kind='plugin-operation',
|
||||
name='plugin-install-marketplace',
|
||||
label=f'Installing plugin from marketplace ...{data}',
|
||||
context=ctx,
|
||||
)
|
||||
|
||||
return self.success(data={'task_id': wrapper.id})
|
||||
|
||||
        @self.route('/install/local', methods=['POST'], auth_type=group.AuthType.USER_TOKEN)
        async def _() -> str:
            file = (await quart.request.files).get('file')
            if file is None:
                return self.http_status(400, -1, 'file is required')

            file_bytes = file.read()

            data = {
                'plugin_file': file_bytes,
            }

            ctx = taskmgr.TaskContext.new()
            wrapper = self.ap.task_mgr.create_user_task(
                self.ap.plugin_connector.install_plugin(PluginInstallSource.LOCAL, data, task_context=ctx),
                kind='plugin-operation',
                name='plugin-install-local',
                label=f'Installing plugin from local ...{file.filename}',
                context=ctx,
            )

            return self.success(data={'task_id': wrapper.id})
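The local-install route expects a multipart form field named file. Below is a client-side sketch of what a matching upload could look like with httpx; the base URL, route prefix, and Authorization header are assumptions for illustration, not a documented LangBot contract.

# Hypothetical client upload against the /install/local route (URL and auth header are assumptions).
import httpx


def upload_plugin(path: str, base_url: str = 'http://localhost:5300', token: str = 'YOUR_TOKEN') -> dict:
    with open(path, 'rb') as f:
        files = {'file': (path, f, 'application/zip')}  # field name must be 'file'
        resp = httpx.post(
            f'{base_url}/api/v1/plugins/install/local',  # assumed full path; check the router prefix
            files=files,
            headers={'Authorization': f'Bearer {token}'},  # assumed header format
            timeout=60,
        )
    resp.raise_for_status()
    return resp.json()  # expected to contain the created task id


if __name__ == '__main__':
    print(upload_plugin('my_plugin.zip'))
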
@@ -6,7 +6,7 @@ from ... import group
@group.group_class('models/llm', '/api/v1/provider/models/llm')
class LLMModelsRouterGroup(group.RouterGroup):
    async def initialize(self) -> None:
        @self.route('', methods=['GET', 'POST'], auth_type=group.AuthType.USER_TOKEN_OR_API_KEY)
        @self.route('', methods=['GET', 'POST'])
        async def _() -> str:
            if quart.request.method == 'GET':
                return self.success(data={'models': await self.ap.llm_model_service.get_llm_models()})
@@ -17,7 +17,7 @@ class LLMModelsRouterGroup(group.RouterGroup):

            return self.success(data={'uuid': model_uuid})

        @self.route('/<model_uuid>', methods=['GET', 'PUT', 'DELETE'], auth_type=group.AuthType.USER_TOKEN_OR_API_KEY)
        @self.route('/<model_uuid>', methods=['GET', 'PUT', 'DELETE'])
        async def _(model_uuid: str) -> str:
            if quart.request.method == 'GET':
                model = await self.ap.llm_model_service.get_llm_model(model_uuid)
@@ -37,7 +37,7 @@ class LLMModelsRouterGroup(group.RouterGroup):

            return self.success()

        @self.route('/<model_uuid>/test', methods=['POST'], auth_type=group.AuthType.USER_TOKEN_OR_API_KEY)
        @self.route('/<model_uuid>/test', methods=['POST'])
        async def _(model_uuid: str) -> str:
            json_data = await quart.request.json

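The two decorator lines in each pair above differ only in the auth_type=group.AuthType.USER_TOKEN_OR_API_KEY argument, i.e. whether the route accepts a dashboard user token or a programmatic API key. LangBot's own RouterGroup.route is not shown in this diff; purely as an illustration of the idea, a decorator accepting such a parameter might look like this (header names and the enum are made up):

# Illustrative sketch of an auth-aware route decorator; NOT LangBot's group.RouterGroup implementation.
import enum
import functools

import quart


class AuthType(enum.Enum):
    NONE = 'none'
    USER_TOKEN = 'user-token'
    USER_TOKEN_OR_API_KEY = 'user-token-or-api-key'


def route_with_auth(app: quart.Quart, rule: str, methods: list[str], auth_type: AuthType = AuthType.NONE):
    def decorator(handler):
        @functools.wraps(handler)
        async def wrapped(*args, **kwargs):
            if auth_type is not AuthType.NONE:
                has_token = 'Authorization' in quart.request.headers  # hypothetical user-token header
                has_api_key = 'X-API-Key' in quart.request.headers    # hypothetical API-key header
                allowed = has_token or (auth_type is AuthType.USER_TOKEN_OR_API_KEY and has_api_key)
                if not allowed:
                    return quart.jsonify({'code': 401, 'msg': 'unauthorized'}), 401
            return await handler(*args, **kwargs)

        app.add_url_rule(rule, endpoint=handler.__name__, view_func=wrapped, methods=methods)
        return wrapped

    return decorator
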
@@ -49,7 +49,7 @@ class LLMModelsRouterGroup(group.RouterGroup):
@group.group_class('models/embedding', '/api/v1/provider/models/embedding')
class EmbeddingModelsRouterGroup(group.RouterGroup):
    async def initialize(self) -> None:
        @self.route('', methods=['GET', 'POST'], auth_type=group.AuthType.USER_TOKEN_OR_API_KEY)
        @self.route('', methods=['GET', 'POST'])
        async def _() -> str:
            if quart.request.method == 'GET':
                return self.success(data={'models': await self.ap.embedding_models_service.get_embedding_models()})
@@ -60,7 +60,7 @@ class EmbeddingModelsRouterGroup(group.RouterGroup):

            return self.success(data={'uuid': model_uuid})

        @self.route('/<model_uuid>', methods=['GET', 'PUT', 'DELETE'], auth_type=group.AuthType.USER_TOKEN_OR_API_KEY)
        @self.route('/<model_uuid>', methods=['GET', 'PUT', 'DELETE'])
        async def _(model_uuid: str) -> str:
            if quart.request.method == 'GET':
                model = await self.ap.embedding_models_service.get_embedding_model(model_uuid)
@@ -80,7 +80,7 @@ class EmbeddingModelsRouterGroup(group.RouterGroup):

            return self.success()

        @self.route('/<model_uuid>/test', methods=['POST'], auth_type=group.AuthType.USER_TOKEN_OR_API_KEY)
        @self.route('/<model_uuid>/test', methods=['POST'])
        async def _(model_uuid: str) -> str:
            json_data = await quart.request.json

@@ -1,8 +1,6 @@
import quart
import mimetypes

from ... import group
from langbot.pkg.utils import importutil


@group.group_class('provider/requesters', '/api/v1/provider/requesters')
@@ -34,6 +32,4 @@ class RequestersRouterGroup(group.RouterGroup):
            if icon_path is None:
                return self.http_status(404, -1, 'icon not found')

            return quart.Response(
                importutil.read_resource_file_bytes(icon_path), mimetype=mimetypes.guess_type(icon_path)[0]
            )
            return await quart.send_file(icon_path)
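The change above swaps a hand-rolled Response (raw bytes plus a MIME type guessed from the file extension) for quart.send_file, which performs the same content-type guessing internally. A minimal side-by-side sketch of the two approaches, using an assumed local file path:

# Two equivalent ways to serve a static icon file (the icon path is an assumed example).
import mimetypes
import pathlib

import quart

app = quart.Quart(__name__)
ICON_PATH = pathlib.Path('assets/icon.png')  # assumed location


@app.route('/icon/manual')
async def icon_manual():
    data = ICON_PATH.read_bytes()
    mime, _ = mimetypes.guess_type(str(ICON_PATH))  # e.g. ('image/png', None)
    return quart.Response(data, mimetype=mime or 'application/octet-stream')


@app.route('/icon/sendfile')
async def icon_sendfile():
    return await quart.send_file(ICON_PATH)         # guesses the MIME type for us
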
@@ -13,6 +13,7 @@ class SystemRouterGroup(group.RouterGroup):
                data={
                    'version': constants.semantic_version,
                    'debug': constants.debug_mode,
                    'enabled_platform_count': len(self.ap.platform_mgr.get_running_adapters()),
                    'enable_marketplace': self.ap.instance_config.data.get('plugin', {}).get(
                        'enable_marketplace', True
                    ),
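The enable_marketplace field falls back to True whenever the plugin section or the key itself is missing, thanks to the chained dict.get calls with defaults. A tiny sketch of that behaviour:

# Chained .get() with defaults: missing sections resolve to the default (True).
config_a = {'plugin': {'enable_marketplace': False}}
config_b = {'plugin': {}}
config_c = {}

for cfg in (config_a, config_b, config_c):
    print(cfg.get('plugin', {}).get('enable_marketplace', True))
# -> False, True, True
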
@@ -5,7 +5,6 @@ import os

import quart
import quart_cors
from werkzeug.exceptions import RequestEntityTooLarge

from ....core import app, entities as core_entities
from ....utils import importutil
@@ -16,14 +15,12 @@ from .groups import provider as groups_provider
from .groups import platform as groups_platform
from .groups import pipelines as groups_pipelines
from .groups import knowledge as groups_knowledge
from .groups import resources as groups_resources

importutil.import_modules_in_pkg(groups)
importutil.import_modules_in_pkg(groups_provider)
importutil.import_modules_in_pkg(groups_platform)
importutil.import_modules_in_pkg(groups_pipelines)
importutil.import_modules_in_pkg(groups_knowledge)
importutil.import_modules_in_pkg(groups_resources)


class HTTPController:
@@ -36,20 +33,7 @@ class HTTPController:
        self.quart_app = quart.Quart(__name__)
        quart_cors.cors(self.quart_app, allow_origin='*')

        # Set maximum content length to prevent large file uploads
        self.quart_app.config['MAX_CONTENT_LENGTH'] = group.MAX_FILE_SIZE

    async def initialize(self) -> None:
        # Register custom error handler for file size limit
        @self.quart_app.errorhandler(RequestEntityTooLarge)
        async def handle_request_entity_too_large(e):
            return quart.jsonify(
                {
                    'code': 400,
                    'msg': 'File size exceeds 10MB limit. Please split large files into smaller parts.',
                }
            ), 400

        await self.register_routes()

    async def run(self) -> None:
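Quart raises werkzeug's RequestEntityTooLarge as soon as an incoming body exceeds MAX_CONTENT_LENGTH, and the errorhandler above turns that 413 into a JSON 400 response. A self-contained sketch of the same mechanism, assuming a 10 MB cap to match the error message:

# Standalone sketch: cap the request body size and answer oversized uploads with JSON.
import quart
from werkzeug.exceptions import RequestEntityTooLarge

MAX_FILE_SIZE = 10 * 1024 * 1024  # assumed 10 MB limit, mirroring the message above

app = quart.Quart(__name__)
app.config['MAX_CONTENT_LENGTH'] = MAX_FILE_SIZE


@app.errorhandler(RequestEntityTooLarge)
async def handle_too_large(e):
    return quart.jsonify({'code': 400, 'msg': 'File size exceeds 10MB limit.'}), 400


@app.route('/upload', methods=['POST'])
async def upload():
    files = await quart.request.files  # an oversized body never reaches this line
    return quart.jsonify({'received': list(files.keys())})
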
@@ -86,9 +70,7 @@ class HTTPController:
            ginst = g(self.ap, self.quart_app)
            await ginst.initialize()

        from ....utils import paths

        frontend_path = paths.get_frontend_path()
        frontend_path = 'web/out'

        @self.quart_app.route('/')
        async def index():
@@ -1,14 +1,13 @@
from __future__ import annotations

import uuid

import sqlalchemy
from langbot_plugin.api.entities.builtin.provider import message as provider_message

from ....core import app
from ....entity.persistence import model as persistence_model
from ....entity.persistence import pipeline as persistence_pipeline
from ....provider.modelmgr import requester as model_requester
from langbot_plugin.api.entities.builtin.provider import message as provider_message


class LLMModelsService:
@@ -105,18 +104,12 @@ class LLMModelsService:
        else:
            runtime_llm_model = await self.ap.model_mgr.init_runtime_llm_model(model_data)

        # Mon Nov 10 2025: commented out because some providers may not support the thinking parameter
        # # Some model providers enable thinking by default, which tends to delay the test
        # extra_args = model_data.get('extra_args', {})
        # if not extra_args or 'thinking' not in extra_args:
        #     extra_args['thinking'] = {'type': 'disabled'}

        await runtime_llm_model.requester.invoke_llm(
            query=None,
            model=runtime_llm_model,
            messages=[provider_message.Message(role='user', content='Hello, world! Please just reply a "Hello".')],
            messages=[provider_message.Message(role='user', content='Hello, world!')],
            funcs=[],
            # extra_args=extra_args,
            extra_args=model_data.get('extra_args', {}),
        )

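The commented-out block documents the earlier behaviour: if the model's extra_args did not already configure thinking, the test call disabled it to avoid slow responses from providers that enable it by default. A small standalone sketch of that guard, kept hypothetical since the accepted shape of the thinking argument varies by provider:

# Sketch of the previously used guard; the {'type': 'disabled'} payload is provider-specific.
def prepare_test_extra_args(model_data: dict) -> dict:
    extra_args = dict(model_data.get('extra_args') or {})
    if 'thinking' not in extra_args:
        # Only force-disable thinking when the user has not configured it explicitly.
        extra_args['thinking'] = {'type': 'disabled'}
    return extra_args


print(prepare_test_extra_args({}))                                                  # {'thinking': {'type': 'disabled'}}
print(prepare_test_extra_args({'extra_args': {'thinking': {'type': 'enabled'}}}))   # left untouched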