Mirror of https://github.com/langbot-app/LangBot.git, synced 2025-11-25 03:15:06 +08:00
Compare commits: v4.3.2...442c93193c (203 commits)
.github/pull_request_template.md (vendored, 11 lines changed)
@@ -2,6 +2,17 @@

> 请在此部分填写你实现/解决/优化的内容:
> Summary of what you implemented/solved/optimized:
>

### 更改前后对比截图 / Screenshots

> 请在此部分粘贴更改前后对比截图(可以是界面截图、控制台输出、对话截图等):
> Please paste the screenshots of changes before and after here (can be interface screenshots, console output, conversation screenshots, etc.):
>
> 修改前 / Before:
>
> 修改后 / After:
>

## 检查清单 / Checklist
.github/workflows/build-docker-image.yml (vendored, 11 lines changed)
@@ -1,10 +1,9 @@
name: Build Docker Image
on:
  #防止fork乱用action设置只能手动触发构建
  workflow_dispatch:
  ## 发布release的时候会自动构建
  release:
    types: [published]
  workflow_dispatch:
jobs:
  publish-docker-image:
    runs-on: ubuntu-latest
@@ -41,5 +40,9 @@ jobs:
        run: docker login --username=${{ secrets.DOCKER_USERNAME }} --password ${{ secrets.DOCKER_PASSWORD }}
      - name: Create Buildx
        run: docker buildx create --name mybuilder --use
      - name: Build # image name: rockchin/langbot:<VERSION>
        run: docker buildx build --platform linux/arm64,linux/amd64 -t rockchin/langbot:${{ steps.check_version.outputs.version }} -t rockchin/langbot:latest . --push
      - name: Build for Release # only release, exclude pre-release
        if: ${{ github.event.release.prerelease == false }}
        run: docker buildx build --platform linux/amd64 -t rockchin/langbot:${{ steps.check_version.outputs.version }} -t rockchin/langbot:latest . --push
      - name: Build for Pre-release # no update for latest tag
        if: ${{ github.event.release.prerelease == true }}
        run: docker buildx build --platform linux/amd64 -t rockchin/langbot:${{ steps.check_version.outputs.version }} . --push
.github/workflows/publish-to-pypi.yml (vendored, new file, 46 lines)
@@ -0,0 +1,46 @@
name: Build and Publish to PyPI

on:
  workflow_dispatch:
  release:
    types: [published]

jobs:
  build-and-publish:
    runs-on: ubuntu-latest
    permissions:
      contents: read
      id-token: write # Required for trusted publishing to PyPI

    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          persist-credentials: false

      - name: Set up Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '22'

      - name: Build frontend
        run: |
          cd web
          npm install -g pnpm
          pnpm install
          pnpm build
          mkdir -p ../src/langbot/web/out
          cp -r out ../src/langbot/web/

      - name: Install the latest version of uv
        uses: astral-sh/setup-uv@v6
        with:
          version: "latest"

      - name: Build package
        run: |
          uv build

      - name: Publish to PyPI
        run: |
          uv publish --token ${{ secrets.PYPI_TOKEN }}
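To sanity-check a release before tagging it, the same artifact can be built locally. A rough equivalent of the workflow's build steps (publishing omitted, and assuming `pnpm` and `uv` are already installed) would be:

```bash
# Local dry run of the release build, mirroring the workflow steps above.
cd web && pnpm install && pnpm build
mkdir -p ../src/langbot/web/out && cp -r out ../src/langbot/web/
cd .. && uv build   # wheel and sdist land in dist/
```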
.github/workflows/run-tests.yml (vendored, new file, 71 lines)
@@ -0,0 +1,71 @@
name: Unit Tests

on:
  pull_request:
    types: [opened, ready_for_review, synchronize]
    paths:
      - 'pkg/**'
      - 'tests/**'
      - '.github/workflows/run-tests.yml'
      - 'pyproject.toml'
      - 'run_tests.sh'
  push:
    branches:
      - master
      - develop
    paths:
      - 'pkg/**'
      - 'tests/**'
      - '.github/workflows/run-tests.yml'
      - 'pyproject.toml'
      - 'run_tests.sh'

jobs:
  test:
    name: Run Unit Tests
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ['3.10', '3.11', '3.12']
      fail-fast: false

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}

      - name: Install uv
        run: |
          curl -LsSf https://astral.sh/uv/install.sh | sh
          echo "$HOME/.cargo/bin" >> $GITHUB_PATH

      - name: Install dependencies
        run: |
          uv sync --dev

      - name: Run unit tests
        run: |
          bash run_tests.sh

      - name: Upload coverage to Codecov
        if: matrix.python-version == '3.12'
        uses: codecov/codecov-action@v5
        with:
          files: ./coverage.xml
          flags: unit-tests
          name: unit-tests-coverage
          fail_ci_if_error: false
        env:
          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

      - name: Test Summary
        if: always()
        run: |
          echo "## Unit Tests Results" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "Python Version: ${{ matrix.python-version }}" >> $GITHUB_STEP_SUMMARY
          echo "Test Status: ${{ job.status }}" >> $GITHUB_STEP_SUMMARY
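Outside CI, the same suite can be run locally with the commands the workflow invokes (assuming `uv` is installed):

```bash
# Local equivalent of the CI test steps above.
uv sync --dev       # install runtime and dev dependencies
bash run_tests.sh   # run the unit tests; CI expects coverage.xml as output
```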
.github/workflows/test-dev-image.yaml (vendored, new file, 108 lines)
@@ -0,0 +1,108 @@
|
||||
name: Test Dev Image
|
||||
|
||||
on:
|
||||
workflow_run:
|
||||
workflows: ["Build Dev Image"]
|
||||
types:
|
||||
- completed
|
||||
branches:
|
||||
- master
|
||||
|
||||
jobs:
|
||||
test-dev-image:
|
||||
runs-on: ubuntu-latest
|
||||
# Only run if the build workflow succeeded
|
||||
if: ${{ github.event.workflow_run.conclusion == 'success' }}
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Update Docker Compose to use master tag
|
||||
working-directory: ./docker
|
||||
run: |
|
||||
# Replace 'latest' with 'master' tag for testing the dev image
|
||||
sed -i 's/rockchin\/langbot:latest/rockchin\/langbot:master/g' docker-compose.yaml
|
||||
echo "Updated docker-compose.yaml to use master tag:"
|
||||
cat docker-compose.yaml
|
||||
|
||||
- name: Start Docker Compose
|
||||
working-directory: ./docker
|
||||
run: docker compose up -d
|
||||
|
||||
- name: Wait and Test API
|
||||
run: |
|
||||
# Function to test API endpoint
|
||||
test_api() {
|
||||
echo "Testing API endpoint..."
|
||||
response=$(curl -s --connect-timeout 10 --max-time 30 -w "\n%{http_code}" http://localhost:5300/api/v1/system/info 2>&1)
|
||||
curl_exit_code=$?
|
||||
|
||||
if [ $curl_exit_code -ne 0 ]; then
|
||||
echo "Curl failed with exit code: $curl_exit_code"
|
||||
echo "Error: $response"
|
||||
return 1
|
||||
fi
|
||||
|
||||
http_code=$(echo "$response" | tail -n 1)
|
||||
response_body=$(echo "$response" | head -n -1)
|
||||
|
||||
if [ "$http_code" = "200" ]; then
|
||||
echo "API is healthy! Response code: $http_code"
|
||||
echo "Response: $response_body"
|
||||
return 0
|
||||
else
|
||||
echo "API returned non-200 response: $http_code"
|
||||
echo "Response body: $response_body"
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
# Wait 30 seconds before first attempt
|
||||
echo "Waiting 30 seconds for services to start..."
|
||||
sleep 30
|
||||
|
||||
# Try up to 3 times with 30-second intervals
|
||||
max_attempts=3
|
||||
attempt=1
|
||||
|
||||
while [ $attempt -le $max_attempts ]; do
|
||||
echo "Attempt $attempt of $max_attempts"
|
||||
|
||||
if test_api; then
|
||||
echo "Success! API is responding correctly."
|
||||
exit 0
|
||||
fi
|
||||
|
||||
if [ $attempt -lt $max_attempts ]; then
|
||||
echo "Retrying in 30 seconds..."
|
||||
sleep 30
|
||||
fi
|
||||
|
||||
attempt=$((attempt + 1))
|
||||
done
|
||||
|
||||
# All attempts failed
|
||||
echo "Failed to get healthy response after $max_attempts attempts"
|
||||
exit 1
|
||||
|
||||
- name: Show Container Logs on Failure
|
||||
if: failure()
|
||||
working-directory: ./docker
|
||||
run: |
|
||||
echo "=== Docker Compose Status ==="
|
||||
docker compose ps
|
||||
echo ""
|
||||
echo "=== LangBot Logs ==="
|
||||
docker compose logs langbot
|
||||
echo ""
|
||||
echo "=== Plugin Runtime Logs ==="
|
||||
docker compose logs langbot_plugin_runtime
|
||||
|
||||
- name: Cleanup
|
||||
if: always()
|
||||
working-directory: ./docker
|
||||
run: docker compose down
|
||||
.gitignore (vendored, 13 lines changed)
@@ -22,7 +22,7 @@ tips.py
venv*
bin/
.vscode
test_*
/test_*
venv/
hugchat.json
qcapi
@@ -43,4 +43,13 @@ test.py
/web_ui
.venv/
uv.lock
/test
/test
plugins.bak
coverage.xml
.coverage
src/langbot/web/

# Build artifacts
/dist
/build
*.egg-info
AGENTS.md (new file, 86 lines)
@@ -0,0 +1,86 @@
# AGENTS.md

This file guides code agents (like Claude Code, GitHub Copilot, OpenAI Codex, etc.) working in the LangBot project.

## Project Overview

LangBot is an open-source, LLM-native instant messaging bot development platform. It aims to provide an out-of-the-box IM bot development experience, ships with Agent, RAG, MCP and other LLM application features, supports instant messaging platforms worldwide, and provides rich API interfaces for custom development.

LangBot has a comprehensive frontend; all operations can be performed through it. The project is split into these major parts:

- `./pkg`: The core Python package of the project backend.
  - `./pkg/platform`: The platform module, containing the logic of message platform adapters, bot managers, message session managers, etc.
  - `./pkg/provider`: The provider module, containing the logic of LLM providers, tool providers, etc.
  - `./pkg/pipeline`: The pipeline module, containing the logic of pipelines, stages, the query pool, etc.
  - `./pkg/api`: The API module, containing the HTTP API controllers and services.
  - `./pkg/plugin`: The LangBot bridge for connecting with the plugin system.
- `./libs`: Some SDKs we previously developed for the project, such as `qq_official_api`, `wecom_api`, etc.
- `./templates`: Templates of config files, components, etc.
- `./web`: Frontend codebase, built with Next.js + **shadcn** + **Tailwind CSS**.
- `./docker`: docker-compose deployment files.

## Backend Development

We use `uv` to manage dependencies.

```bash
pip install uv
uv sync --dev
```

Start the backend and run the project in development mode.

```bash
uv run main.py
```

Then you can access the project at `http://127.0.0.1:5300`.

## Frontend Development

We use `pnpm` to manage dependencies.

```bash
cd web
cp .env.example .env
pnpm install
pnpm dev
```

Then you can access the project at `http://127.0.0.1:3000`.

## Plugin System Architecture

LangBot is composed of various internal components such as Large Language Model tools, commands, messaging platform adapters, LLM requesters, and more. To meet extensibility and flexibility requirements, we have implemented a production-grade plugin system.

Each plugin runs in an independent process, managed uniformly by the Plugin Runtime. It has two operating modes: `stdio` and `websocket`. When LangBot is started directly by users (not running in a container), it uses `stdio` mode, which is common for personal users or lightweight environments. When LangBot runs in a container, it uses `websocket` mode, designed specifically for production environments.

The Plugin Runtime automatically starts each installed plugin and interacts with it through stdio. In plugin development scenarios, developers can use the `lbp` command-line tool to start plugins and connect to the running Runtime via WebSocket for debugging.

> The Plugin SDK, CLI, Runtime, and entity definitions shared between LangBot and plugins are contained in the [`langbot-plugin-sdk`](https://github.com/langbot-app/langbot-plugin-sdk) repository.
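As a concrete illustration of the `websocket` mode wiring, a minimal sketch is shown below. It reuses the `PLUGIN__RUNTIME_WS_URL` value that appears in `docker/kubernetes.yaml` later in this changeset; the host name only resolves inside that deployment and is an assumption here.

```bash
# Minimal sketch: point a LangBot process at an externally managed Plugin Runtime
# over WebSocket instead of spawning it via stdio.
export PLUGIN__RUNTIME_WS_URL="ws://langbot-plugin-runtime:5400/control/ws"
uv run main.py
```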

## Some Development Tips and Standards

- LangBot is a global project: any comments in code should be in English, and user experience should be considered in all aspects.
  - Thus you should consider i18n support in all aspects.
- LangBot is widely adopted in both toC and toB scenarios, so you should consider compatibility and security in all aspects.
- If you were asked to make a commit, please follow the commit message format (see the example after this list):
  - format: <type>(<scope>): <subject>
  - type: must be a specific type, such as feat (new feature), fix (bug fix), docs (documentation), style (code style), refactor (refactoring), perf (performance optimization), etc.
  - scope: the scope of the commit, such as the package name, the file name, the function name, the class name, the module name, etc.
  - subject: the subject of the commit, such as the description of the commit, the reason for the commit, the impact of the commit, etc.
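For instance, a commit fixing a bug in the pipeline module could be made as follows; the scope and subject are hypothetical and only illustrate the format:

```bash
# Hypothetical example of the <type>(<scope>): <subject> format described above.
git commit -m "fix(pipeline): handle empty query pool when no adapter is bound"
```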

## Some Principles

- Keep it simple, stupid.
- Entities should not be multiplied unnecessarily.
- 八荣八耻 (the "eight honors and eight shames" of working on this codebase):

Take guessing at interfaces as shame; take researching them carefully as honor.
Take vague execution as shame; take seeking confirmation as honor.
Take imagining business logic as shame; take confirming with a human as honor.
Take inventing new interfaces as shame; take reusing existing ones as honor.
Take skipping validation as shame; take proactive testing as honor.
Take breaking the architecture as shame; take following conventions as honor.
Take pretending to understand as shame; take honest ignorance as honor.
Take blind modification as shame; take careful refactoring as honor.
README.md (25 lines changed)
@@ -31,11 +31,21 @@ LangBot 是一个开源的大语言模型原生即时通信机器人开发平台
|
||||
|
||||
## 📦 开始使用
|
||||
|
||||
#### 快速部署
|
||||
|
||||
使用 `uvx` 一键启动(需要先安装 [uv](https://docs.astral.sh/uv/getting-started/installation/)):
|
||||
|
||||
```bash
|
||||
uvx langbot
|
||||
```
|
||||
|
||||
访问 http://localhost:5300 即可开始使用。
|
||||
|
||||
#### Docker Compose 部署
|
||||
|
||||
```bash
|
||||
git clone https://github.com/langbot-app/LangBot
|
||||
cd LangBot
|
||||
cd LangBot/docker
|
||||
docker compose up -d
|
||||
```
|
||||
|
||||
@@ -61,6 +71,10 @@ docker compose up -d
|
||||
|
||||
直接使用发行版运行,查看文档[手动部署](https://docs.langbot.app/zh/deploy/langbot/manual.html)。
|
||||
|
||||
#### Kubernetes 部署
|
||||
|
||||
参考 [Kubernetes 部署](./docker/README_K8S.md) 文档。
|
||||
|
||||
## 😎 保持更新
|
||||
|
||||
点击仓库右上角 Star 和 Watch 按钮,获取最新动态。
|
||||
@@ -112,6 +126,7 @@ docker compose up -d
|
||||
| [胜算云](https://www.shengsuanyun.com/?from=CH_KYIPP758) | ✅ | 全球大模型都可调用(友情推荐) |
|
||||
| [优云智算](https://www.compshare.cn/?ytag=GPU_YY-gh_langbot) | ✅ | 大模型和 GPU 资源平台 |
|
||||
| [PPIO](https://ppinfra.com/user/register?invited_by=QJKFYD&utm_source=github_langbot) | ✅ | 大模型和 GPU 资源平台 |
|
||||
| [接口 AI](https://jiekou.ai/) | ✅ | 大模型聚合平台,专注全球大模型接入 |
|
||||
| [302.AI](https://share.302.ai/SuTG99) | ✅ | 大模型聚合平台 |
|
||||
| [Google Gemini](https://aistudio.google.com/prompts/new_chat) | ✅ | |
|
||||
| [Dify](https://dify.ai) | ✅ | LLMOps 平台 |
|
||||
@@ -119,10 +134,12 @@ docker compose up -d
|
||||
| [LMStudio](https://lmstudio.ai/) | ✅ | 本地大模型运行平台 |
|
||||
| [GiteeAI](https://ai.gitee.com/) | ✅ | 大模型接口聚合平台 |
|
||||
| [SiliconFlow](https://siliconflow.cn/) | ✅ | 大模型聚合平台 |
|
||||
| [小马算力](https://www.tokenpony.cn/453z1) | ✅ | 大模型聚合平台 |
|
||||
| [阿里云百炼](https://bailian.console.aliyun.com/) | ✅ | 大模型聚合平台, LLMOps 平台 |
|
||||
| [火山方舟](https://console.volcengine.com/ark/region:ark+cn-beijing/model?vendor=Bytedance&view=LIST_VIEW) | ✅ | 大模型聚合平台, LLMOps 平台 |
|
||||
| [ModelScope](https://modelscope.cn/docs/model-service/API-Inference/intro) | ✅ | 大模型聚合平台 |
|
||||
| [MCP](https://modelcontextprotocol.io/) | ✅ | 支持通过 MCP 协议获取工具 |
|
||||
| [百宝箱Tbox](https://www.tbox.cn/open) | ✅ | 蚂蚁百宝箱智能体平台,每月免费10亿大模型Token |
|
||||
|
||||
### TTS
|
||||
|
||||
@@ -145,3 +162,9 @@ docker compose up -d
|
||||
<a href="https://github.com/langbot-app/LangBot/graphs/contributors">
|
||||
<img src="https://contrib.rocks/image?repo=langbot-app/LangBot" />
|
||||
</a>
|
||||
|
||||
<!--
|
||||
## For Code Agents
|
||||
|
||||
If you're a code agent based on LLM, you should read [AGENTS.md](AGENTS.md) to understand the project structure and development process before you start any development.
|
||||
-->
|
||||
|
||||
README_EN.md (17 lines changed)
@@ -25,11 +25,21 @@ LangBot is an open-source LLM native instant messaging robot development platfor

## 📦 Getting Started

#### Quick Start

Use `uvx` to start with one command (you need to install [uv](https://docs.astral.sh/uv/getting-started/installation/) first):

```bash
uvx langbot
```

Visit http://localhost:5300 to start using it.

#### Docker Compose Deployment

```bash
git clone https://github.com/langbot-app/LangBot
cd LangBot
cd LangBot/docker
docker compose up -d
```
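Once the containers are up, a quick probe of the API confirms the stack is healthy before you open the UI; the endpoint below is the same one the repository's dev-image test workflow checks:

```bash
# Probe the freshly started stack (5300 is the Web UI / API port).
curl -s http://localhost:5300/api/v1/system/info
```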

@@ -55,6 +65,10 @@ Community contributed Zeabur template.

Directly use the released version to run; see the [Manual Deployment](https://docs.langbot.app/en/deploy/langbot/manual.html) documentation.

#### Kubernetes Deployment

Refer to the [Kubernetes Deployment](./docker/README_K8S.md) documentation.

## 😎 Stay Ahead

Click the Star and Watch buttons in the upper right corner of the repository to get the latest updates.

@@ -105,6 +119,7 @@ Or visit the demo environment: https://demo.langbot.dev/
| [CompShare](https://www.compshare.cn/?ytag=GPU_YY-gh_langbot) | ✅ | LLM and GPU resource platform |
| [Dify](https://dify.ai) | ✅ | LLMOps platform |
| [PPIO](https://ppinfra.com/user/register?invited_by=QJKFYD&utm_source=github_langbot) | ✅ | LLM and GPU resource platform |
| [接口 AI](https://jiekou.ai/) | ✅ | LLM aggregation platform, dedicated to global LLMs |
| [ShengSuanYun](https://www.shengsuanyun.com/?from=CH_KYIPP758) | ✅ | LLM and GPU resource platform |
| [302.AI](https://share.302.ai/SuTG99) | ✅ | LLM gateway (MaaS) |
| [Google Gemini](https://aistudio.google.com/prompts/new_chat) | ✅ | |
README_JP.md (17 lines changed)
@@ -25,11 +25,21 @@ LangBot は、エージェント、RAG、MCP などの LLM アプリケーショ
|
||||
|
||||
## 📦 始め方
|
||||
|
||||
#### クイックスタート
|
||||
|
||||
`uvx` を使用した迅速なデプロイ([uv](https://docs.astral.sh/uv/getting-started/installation/) が必要です):
|
||||
|
||||
```bash
|
||||
uvx langbot
|
||||
```
|
||||
|
||||
http://localhost:5300 にアクセスして使用を開始します。
|
||||
|
||||
#### Docker Compose デプロイ
|
||||
|
||||
```bash
|
||||
git clone https://github.com/langbot-app/LangBot
|
||||
cd LangBot
|
||||
cd LangBot/docker
|
||||
docker compose up -d
|
||||
```
|
||||
|
||||
@@ -55,6 +65,10 @@ LangBotはBTPanelにリストされています。BTPanelをインストール
|
||||
|
||||
リリースバージョンを直接使用して実行します。[手動デプロイ](https://docs.langbot.app/en/deploy/langbot/manual.html)のドキュメントを参照してください。
|
||||
|
||||
#### Kubernetes デプロイ
|
||||
|
||||
[Kubernetes デプロイ](./docker/README_K8S.md) ドキュメントを参照してください。
|
||||
|
||||
## 😎 最新情報を入手
|
||||
|
||||
リポジトリの右上にある Star と Watch ボタンをクリックして、最新の更新を取得してください。
|
||||
@@ -104,6 +118,7 @@ LangBotはBTPanelにリストされています。BTPanelをインストール
|
||||
| [Zhipu AI](https://open.bigmodel.cn/) | ✅ | |
|
||||
| [CompShare](https://www.compshare.cn/?ytag=GPU_YY-gh_langbot) | ✅ | 大模型とGPUリソースプラットフォーム |
|
||||
| [PPIO](https://ppinfra.com/user/register?invited_by=QJKFYD&utm_source=github_langbot) | ✅ | 大模型とGPUリソースプラットフォーム |
|
||||
| [接口 AI](https://jiekou.ai/) | ✅ | LLMゲートウェイ(MaaS) |
|
||||
| [ShengSuanYun](https://www.shengsuanyun.com/?from=CH_KYIPP758) | ✅ | LLMとGPUリソースプラットフォーム |
|
||||
| [302.AI](https://share.302.ai/SuTG99) | ✅ | LLMゲートウェイ(MaaS) |
|
||||
| [Google Gemini](https://aistudio.google.com/prompts/new_chat) | ✅ | |
|
||||
|
||||
README_TW.md (17 lines changed)
@@ -27,11 +27,21 @@ LangBot 是一個開源的大語言模型原生即時通訊機器人開發平台
|
||||
|
||||
## 📦 開始使用
|
||||
|
||||
#### 快速部署
|
||||
|
||||
使用 `uvx` 一鍵啟動(需要先安裝 [uv](https://docs.astral.sh/uv/getting-started/installation/) ):
|
||||
|
||||
```bash
|
||||
uvx langbot
|
||||
```
|
||||
|
||||
訪問 http://localhost:5300 即可開始使用。
|
||||
|
||||
#### Docker Compose 部署
|
||||
|
||||
```bash
|
||||
git clone https://github.com/langbot-app/LangBot
|
||||
cd LangBot
|
||||
cd LangBot/docker
|
||||
docker compose up -d
|
||||
```
|
||||
|
||||
@@ -57,6 +67,10 @@ docker compose up -d
|
||||
|
||||
直接使用發行版運行,查看文件[手動部署](https://docs.langbot.app/zh/deploy/langbot/manual.html)。
|
||||
|
||||
#### Kubernetes 部署
|
||||
|
||||
參考 [Kubernetes 部署](./docker/README_K8S.md) 文件。
|
||||
|
||||
## 😎 保持更新
|
||||
|
||||
點擊倉庫右上角 Star 和 Watch 按鈕,獲取最新動態。
|
||||
@@ -107,6 +121,7 @@ docker compose up -d
|
||||
| [勝算雲](https://www.shengsuanyun.com/?from=CH_KYIPP758) | ✅ | 大模型和 GPU 資源平台 |
|
||||
| [優雲智算](https://www.compshare.cn/?ytag=GPU_YY-gh_langbot) | ✅ | 大模型和 GPU 資源平台 |
|
||||
| [PPIO](https://ppinfra.com/user/register?invited_by=QJKFYD&utm_source=github_langbot) | ✅ | 大模型和 GPU 資源平台 |
|
||||
| [接口 AI](https://jiekou.ai/) | ✅ | 大模型聚合平台,專注全球大模型接入 |
|
||||
| [302.AI](https://share.302.ai/SuTG99) | ✅ | 大模型聚合平台 |
|
||||
| [Google Gemini](https://aistudio.google.com/prompts/new_chat) | ✅ | |
|
||||
| [Dify](https://dify.ai) | ✅ | LLMOps 平台 |
|
||||
|
||||
codecov.yml (new file, 4 lines)
@@ -0,0 +1,4 @@
coverage:
  status:
    project: off
    patch: off
docker/README_K8S.md (new file, 629 lines)
@@ -0,0 +1,629 @@
|
||||
# LangBot Kubernetes 部署指南 / Kubernetes Deployment Guide
|
||||
|
||||
[简体中文](#简体中文) | [English](#english)
|
||||
|
||||
---
|
||||
|
||||
## 简体中文
|
||||
|
||||
### 概述
|
||||
|
||||
本指南提供了在 Kubernetes 集群中部署 LangBot 的完整步骤。Kubernetes 部署配置基于 `docker-compose.yaml`,适用于生产环境的容器化部署。
|
||||
|
||||
### 前置要求
|
||||
|
||||
- Kubernetes 集群(版本 1.19+)
|
||||
- `kubectl` 命令行工具已配置并可访问集群
|
||||
- 集群中有可用的存储类(StorageClass)用于持久化存储(可选但推荐)
|
||||
- 至少 2 vCPU 和 4GB RAM 的可用资源
|
||||
|
||||
### 架构说明
|
||||
|
||||
Kubernetes 部署包含以下组件:
|
||||
|
||||
1. **langbot**: 主应用服务
|
||||
- 提供 Web UI(端口 5300)
|
||||
- 处理平台 webhook(端口 2280-2290)
|
||||
- 数据持久化卷
|
||||
|
||||
2. **langbot-plugin-runtime**: 插件运行时服务
|
||||
- WebSocket 通信(端口 5400)
|
||||
- 插件数据持久化卷
|
||||
|
||||
3. **持久化存储**:
|
||||
- `langbot-data`: LangBot 主数据
|
||||
- `langbot-plugins`: 插件文件
|
||||
- `langbot-plugin-runtime-data`: 插件运行时数据
|
||||
|
||||
### 快速开始
|
||||
|
||||
#### 1. 下载部署文件
|
||||
|
||||
```bash
|
||||
# 克隆仓库
|
||||
git clone https://github.com/langbot-app/LangBot
|
||||
cd LangBot/docker
|
||||
|
||||
# 或直接下载 kubernetes.yaml
|
||||
wget https://raw.githubusercontent.com/langbot-app/LangBot/main/docker/kubernetes.yaml
|
||||
```
|
||||
|
||||
#### 2. 部署到 Kubernetes
|
||||
|
||||
```bash
|
||||
# 应用所有配置
|
||||
kubectl apply -f kubernetes.yaml
|
||||
|
||||
# 检查部署状态
|
||||
kubectl get all -n langbot
|
||||
|
||||
# 查看 Pod 日志
|
||||
kubectl logs -n langbot -l app=langbot -f
|
||||
```
|
||||
|
||||
#### 3. 访问 LangBot
|
||||
|
||||
默认情况下,LangBot 服务使用 ClusterIP 类型,只能在集群内部访问。您可以选择以下方式之一来访问:
|
||||
|
||||
**选项 A: 端口转发(推荐用于测试)**
|
||||
|
||||
```bash
|
||||
kubectl port-forward -n langbot svc/langbot 5300:5300
|
||||
```
|
||||
|
||||
然后访问 http://localhost:5300
|
||||
|
||||
**选项 B: NodePort(适用于开发环境)**
|
||||
|
||||
编辑 `kubernetes.yaml`,取消注释 NodePort Service 部分,然后:
|
||||
|
||||
```bash
|
||||
kubectl apply -f kubernetes.yaml
|
||||
# 获取节点 IP
|
||||
kubectl get nodes -o wide
|
||||
# 访问 http://<NODE_IP>:30300
|
||||
```
|
||||
|
||||
**选项 C: LoadBalancer(适用于云环境)**
|
||||
|
||||
编辑 `kubernetes.yaml`,取消注释 LoadBalancer Service 部分,然后:
|
||||
|
||||
```bash
|
||||
kubectl apply -f kubernetes.yaml
|
||||
# 获取外部 IP
|
||||
kubectl get svc -n langbot langbot-loadbalancer
|
||||
# 访问 http://<EXTERNAL_IP>
|
||||
```
|
||||
|
||||
**选项 D: Ingress(推荐用于生产环境)**
|
||||
|
||||
确保集群中已安装 Ingress Controller(如 nginx-ingress),然后:
|
||||
|
||||
1. 编辑 `kubernetes.yaml` 中的 Ingress 配置
|
||||
2. 修改域名为您的实际域名
|
||||
3. 应用配置:
|
||||
|
||||
```bash
|
||||
kubectl apply -f kubernetes.yaml
|
||||
# 访问 http://langbot.yourdomain.com
|
||||
```
|
||||
|
||||
### 配置说明
|
||||
|
||||
#### 环境变量
|
||||
|
||||
在 `ConfigMap` 中配置环境变量:
|
||||
|
||||
```yaml
|
||||
apiVersion: v1
|
||||
kind: ConfigMap
|
||||
metadata:
|
||||
name: langbot-config
|
||||
namespace: langbot
|
||||
data:
|
||||
TZ: "Asia/Shanghai" # 修改为您的时区
|
||||
```
|
||||
|
||||
#### 存储配置
|
||||
|
||||
默认使用动态存储分配。如果您有特定的 StorageClass,请在 PVC 中指定:
|
||||
|
||||
```yaml
|
||||
spec:
|
||||
storageClassName: your-storage-class-name
|
||||
accessModes:
|
||||
- ReadWriteOnce
|
||||
resources:
|
||||
requests:
|
||||
storage: 10Gi
|
||||
```
|
||||
|
||||
#### 资源限制
|
||||
|
||||
根据您的需求调整资源限制:
|
||||
|
||||
```yaml
|
||||
resources:
|
||||
requests:
|
||||
memory: "1Gi"
|
||||
cpu: "500m"
|
||||
limits:
|
||||
memory: "4Gi"
|
||||
cpu: "2000m"
|
||||
```
|
||||
|
||||
### 常用操作
|
||||
|
||||
#### 查看日志
|
||||
|
||||
```bash
|
||||
# 查看 LangBot 主服务日志
|
||||
kubectl logs -n langbot -l app=langbot -f
|
||||
|
||||
# 查看插件运行时日志
|
||||
kubectl logs -n langbot -l app=langbot-plugin-runtime -f
|
||||
```
|
||||
|
||||
#### 重启服务
|
||||
|
||||
```bash
|
||||
# 重启 LangBot
|
||||
kubectl rollout restart deployment/langbot -n langbot
|
||||
|
||||
# 重启插件运行时
|
||||
kubectl rollout restart deployment/langbot-plugin-runtime -n langbot
|
||||
```
|
||||
|
||||
#### 更新镜像
|
||||
|
||||
```bash
|
||||
# 更新到最新版本
|
||||
kubectl set image deployment/langbot -n langbot langbot=rockchin/langbot:latest
|
||||
kubectl set image deployment/langbot-plugin-runtime -n langbot langbot-plugin-runtime=rockchin/langbot:latest
|
||||
|
||||
# 检查更新状态
|
||||
kubectl rollout status deployment/langbot -n langbot
|
||||
```
|
||||
|
||||
#### 扩容(不推荐)
|
||||
|
||||
注意:由于 LangBot 使用 ReadWriteOnce 的持久化存储,不支持多副本扩容。如需高可用,请考虑使用 ReadWriteMany 存储或其他架构方案。
|
||||
|
||||
#### 备份数据
|
||||
|
||||
```bash
|
||||
# 备份 PVC 数据
|
||||
kubectl exec -n langbot -it <langbot-pod-name> -- tar czf /tmp/backup.tar.gz /app/data
|
||||
kubectl cp langbot/<langbot-pod-name>:/tmp/backup.tar.gz ./backup.tar.gz
|
||||
```
|
||||
|
||||
### 卸载
|
||||
|
||||
```bash
|
||||
# 删除所有资源(保留 PVC)
|
||||
kubectl delete deployment,service,configmap -n langbot --all
|
||||
|
||||
# 删除 PVC(会删除数据)
|
||||
kubectl delete pvc -n langbot --all
|
||||
|
||||
# 删除命名空间
|
||||
kubectl delete namespace langbot
|
||||
```
|
||||
|
||||
### 故障排查
|
||||
|
||||
#### Pod 无法启动
|
||||
|
||||
```bash
|
||||
# 查看 Pod 状态
|
||||
kubectl get pods -n langbot
|
||||
|
||||
# 查看详细信息
|
||||
kubectl describe pod -n langbot <pod-name>
|
||||
|
||||
# 查看事件
|
||||
kubectl get events -n langbot --sort-by='.lastTimestamp'
|
||||
```
|
||||
|
||||
#### 存储问题
|
||||
|
||||
```bash
|
||||
# 检查 PVC 状态
|
||||
kubectl get pvc -n langbot
|
||||
|
||||
# 检查 PV
|
||||
kubectl get pv
|
||||
```
|
||||
|
||||
#### 网络访问问题
|
||||
|
||||
```bash
|
||||
# 检查 Service
|
||||
kubectl get svc -n langbot
|
||||
|
||||
# 检查端口转发
|
||||
kubectl port-forward -n langbot svc/langbot 5300:5300
|
||||
```
|
||||
|
||||
### 生产环境建议
|
||||
|
||||
1. **使用特定版本标签**:避免使用 `latest` 标签,使用具体版本号如 `rockchin/langbot:v1.0.0`
|
||||
2. **配置资源限制**:根据实际负载调整 CPU 和内存限制
|
||||
3. **使用 Ingress + TLS**:配置 HTTPS 访问和证书管理
|
||||
4. **配置监控和告警**:集成 Prometheus、Grafana 等监控工具
|
||||
5. **定期备份**:配置自动备份策略保护数据
|
||||
6. **使用专用 StorageClass**:为生产环境配置高性能存储
|
||||
7. **配置亲和性规则**:确保 Pod 调度到合适的节点
|
||||
|
||||
### 高级配置
|
||||
|
||||
#### 使用 Secrets 管理敏感信息
|
||||
|
||||
如果需要配置 API 密钥等敏感信息:
|
||||
|
||||
```yaml
|
||||
apiVersion: v1
|
||||
kind: Secret
|
||||
metadata:
|
||||
name: langbot-secrets
|
||||
namespace: langbot
|
||||
type: Opaque
|
||||
data:
|
||||
api_key: <base64-encoded-value>
|
||||
```
|
||||
|
||||
然后在 Deployment 中引用:
|
||||
|
||||
```yaml
|
||||
env:
|
||||
- name: API_KEY
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: langbot-secrets
|
||||
key: api_key
|
||||
```
|
||||
|
||||
#### 配置水平自动扩缩容(HPA)
|
||||
|
||||
注意:需要确保使用 ReadWriteMany 存储类型
|
||||
|
||||
```yaml
|
||||
apiVersion: autoscaling/v2
|
||||
kind: HorizontalPodAutoscaler
|
||||
metadata:
|
||||
name: langbot-hpa
|
||||
namespace: langbot
|
||||
spec:
|
||||
scaleTargetRef:
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
name: langbot
|
||||
minReplicas: 1
|
||||
maxReplicas: 3
|
||||
metrics:
|
||||
- type: Resource
|
||||
resource:
|
||||
name: cpu
|
||||
target:
|
||||
type: Utilization
|
||||
averageUtilization: 70
|
||||
```
|
||||
|
||||
### 参考资源
|
||||
|
||||
- [LangBot 官方文档](https://docs.langbot.app)
|
||||
- [Docker 部署文档](https://docs.langbot.app/zh/deploy/langbot/docker.html)
|
||||
- [Kubernetes 官方文档](https://kubernetes.io/docs/)
|
||||
|
||||
---
|
||||
|
||||
## English
|
||||
|
||||
### Overview
|
||||
|
||||
This guide provides complete steps for deploying LangBot in a Kubernetes cluster. The Kubernetes deployment configuration is based on `docker-compose.yaml` and is suitable for production containerized deployments.
|
||||
|
||||
### Prerequisites
|
||||
|
||||
- Kubernetes cluster (version 1.19+)
|
||||
- `kubectl` command-line tool configured with cluster access
|
||||
- Available StorageClass in the cluster for persistent storage (optional but recommended)
|
||||
- At least 2 vCPU and 4GB RAM of available resources
|
||||
|
||||
### Architecture
|
||||
|
||||
The Kubernetes deployment includes the following components:
|
||||
|
||||
1. **langbot**: Main application service
|
||||
- Provides Web UI (port 5300)
|
||||
- Handles platform webhooks (ports 2280-2290)
|
||||
- Data persistence volume
|
||||
|
||||
2. **langbot-plugin-runtime**: Plugin runtime service
|
||||
- WebSocket communication (port 5400)
|
||||
- Plugin data persistence volume
|
||||
|
||||
3. **Persistent Storage**:
|
||||
- `langbot-data`: LangBot main data
|
||||
- `langbot-plugins`: Plugin files
|
||||
- `langbot-plugin-runtime-data`: Plugin runtime data
|
||||
|
||||
### Quick Start
|
||||
|
||||
#### 1. Download Deployment Files
|
||||
|
||||
```bash
|
||||
# Clone repository
|
||||
git clone https://github.com/langbot-app/LangBot
|
||||
cd LangBot/docker
|
||||
|
||||
# Or download kubernetes.yaml directly
|
||||
wget https://raw.githubusercontent.com/langbot-app/LangBot/main/docker/kubernetes.yaml
|
||||
```
|
||||
|
||||
#### 2. Deploy to Kubernetes
|
||||
|
||||
```bash
|
||||
# Apply all configurations
|
||||
kubectl apply -f kubernetes.yaml
|
||||
|
||||
# Check deployment status
|
||||
kubectl get all -n langbot
|
||||
|
||||
# View Pod logs
|
||||
kubectl logs -n langbot -l app=langbot -f
|
||||
```
|
||||
|
||||
#### 3. Access LangBot
|
||||
|
||||
By default, LangBot service uses ClusterIP type, accessible only within the cluster. Choose one of the following methods to access:
|
||||
|
||||
**Option A: Port Forwarding (Recommended for testing)**
|
||||
|
||||
```bash
|
||||
kubectl port-forward -n langbot svc/langbot 5300:5300
|
||||
```
|
||||
|
||||
Then visit http://localhost:5300
|
||||
|
||||
**Option B: NodePort (Suitable for development)**
|
||||
|
||||
Edit `kubernetes.yaml`, uncomment the NodePort Service section, then:
|
||||
|
||||
```bash
|
||||
kubectl apply -f kubernetes.yaml
|
||||
# Get node IP
|
||||
kubectl get nodes -o wide
|
||||
# Visit http://<NODE_IP>:30300
|
||||
```
|
||||
|
||||
**Option C: LoadBalancer (Suitable for cloud environments)**
|
||||
|
||||
Edit `kubernetes.yaml`, uncomment the LoadBalancer Service section, then:
|
||||
|
||||
```bash
|
||||
kubectl apply -f kubernetes.yaml
|
||||
# Get external IP
|
||||
kubectl get svc -n langbot langbot-loadbalancer
|
||||
# Visit http://<EXTERNAL_IP>
|
||||
```
|
||||
|
||||
**Option D: Ingress (Recommended for production)**
|
||||
|
||||
Ensure an Ingress Controller (e.g., nginx-ingress) is installed in the cluster, then:
|
||||
|
||||
1. Edit the Ingress configuration in `kubernetes.yaml`
|
||||
2. Change the domain to your actual domain
|
||||
3. Apply configuration:
|
||||
|
||||
```bash
|
||||
kubectl apply -f kubernetes.yaml
|
||||
# Visit http://langbot.yourdomain.com
|
||||
```
|
||||
|
||||
### Configuration
|
||||
|
||||
#### Environment Variables
|
||||
|
||||
Configure environment variables in ConfigMap:
|
||||
|
||||
```yaml
|
||||
apiVersion: v1
|
||||
kind: ConfigMap
|
||||
metadata:
|
||||
name: langbot-config
|
||||
namespace: langbot
|
||||
data:
|
||||
TZ: "Asia/Shanghai" # Change to your timezone
|
||||
```
|
||||
|
||||
#### Storage Configuration
|
||||
|
||||
Uses dynamic storage provisioning by default. If you have a specific StorageClass, specify it in PVC:
|
||||
|
||||
```yaml
|
||||
spec:
|
||||
storageClassName: your-storage-class-name
|
||||
accessModes:
|
||||
- ReadWriteOnce
|
||||
resources:
|
||||
requests:
|
||||
storage: 10Gi
|
||||
```
|
||||
|
||||
#### Resource Limits
|
||||
|
||||
Adjust resource limits based on your needs:
|
||||
|
||||
```yaml
|
||||
resources:
|
||||
requests:
|
||||
memory: "1Gi"
|
||||
cpu: "500m"
|
||||
limits:
|
||||
memory: "4Gi"
|
||||
cpu: "2000m"
|
||||
```
|
||||
|
||||
### Common Operations
|
||||
|
||||
#### View Logs
|
||||
|
||||
```bash
|
||||
# View LangBot main service logs
|
||||
kubectl logs -n langbot -l app=langbot -f
|
||||
|
||||
# View plugin runtime logs
|
||||
kubectl logs -n langbot -l app=langbot-plugin-runtime -f
|
||||
```
|
||||
|
||||
#### Restart Services
|
||||
|
||||
```bash
|
||||
# Restart LangBot
|
||||
kubectl rollout restart deployment/langbot -n langbot
|
||||
|
||||
# Restart plugin runtime
|
||||
kubectl rollout restart deployment/langbot-plugin-runtime -n langbot
|
||||
```
|
||||
|
||||
#### Update Images
|
||||
|
||||
```bash
|
||||
# Update to latest version
|
||||
kubectl set image deployment/langbot -n langbot langbot=rockchin/langbot:latest
|
||||
kubectl set image deployment/langbot-plugin-runtime -n langbot langbot-plugin-runtime=rockchin/langbot:latest
|
||||
|
||||
# Check update status
|
||||
kubectl rollout status deployment/langbot -n langbot
|
||||
```
|
||||
|
||||
#### Scaling (Not Recommended)
|
||||
|
||||
Note: Due to LangBot using ReadWriteOnce persistent storage, multi-replica scaling is not supported. For high availability, consider using ReadWriteMany storage or alternative architectures.
|
||||
|
||||
#### Backup Data
|
||||
|
||||
```bash
|
||||
# Backup PVC data
|
||||
kubectl exec -n langbot -it <langbot-pod-name> -- tar czf /tmp/backup.tar.gz /app/data
|
||||
kubectl cp langbot/<langbot-pod-name>:/tmp/backup.tar.gz ./backup.tar.gz
|
||||
```
|
||||
|
||||
### Uninstall
|
||||
|
||||
```bash
|
||||
# Delete all resources (keep PVCs)
|
||||
kubectl delete deployment,service,configmap -n langbot --all
|
||||
|
||||
# Delete PVCs (will delete data)
|
||||
kubectl delete pvc -n langbot --all
|
||||
|
||||
# Delete namespace
|
||||
kubectl delete namespace langbot
|
||||
```
|
||||
|
||||
### Troubleshooting
|
||||
|
||||
#### Pods Not Starting
|
||||
|
||||
```bash
|
||||
# Check Pod status
|
||||
kubectl get pods -n langbot
|
||||
|
||||
# View detailed information
|
||||
kubectl describe pod -n langbot <pod-name>
|
||||
|
||||
# View events
|
||||
kubectl get events -n langbot --sort-by='.lastTimestamp'
|
||||
```
|
||||
|
||||
#### Storage Issues
|
||||
|
||||
```bash
|
||||
# Check PVC status
|
||||
kubectl get pvc -n langbot
|
||||
|
||||
# Check PV
|
||||
kubectl get pv
|
||||
```
|
||||
|
||||
#### Network Access Issues
|
||||
|
||||
```bash
|
||||
# Check Service
|
||||
kubectl get svc -n langbot
|
||||
|
||||
# Test port forwarding
|
||||
kubectl port-forward -n langbot svc/langbot 5300:5300
|
||||
```
|
||||
|
||||
### Production Recommendations
|
||||
|
||||
1. **Use specific version tags**: Avoid using `latest` tag, use specific version like `rockchin/langbot:v1.0.0`
|
||||
2. **Configure resource limits**: Adjust CPU and memory limits based on actual load
|
||||
3. **Use Ingress + TLS**: Configure HTTPS access and certificate management
|
||||
4. **Configure monitoring and alerts**: Integrate monitoring tools like Prometheus, Grafana
|
||||
5. **Regular backups**: Configure automated backup strategy to protect data
|
||||
6. **Use dedicated StorageClass**: Configure high-performance storage for production
|
||||
7. **Configure affinity rules**: Ensure Pods are scheduled to appropriate nodes
|
||||
|
||||
### Advanced Configuration
|
||||
|
||||
#### Using Secrets for Sensitive Information
|
||||
|
||||
If you need to configure sensitive information like API keys:
|
||||
|
||||
```yaml
|
||||
apiVersion: v1
|
||||
kind: Secret
|
||||
metadata:
|
||||
name: langbot-secrets
|
||||
namespace: langbot
|
||||
type: Opaque
|
||||
data:
|
||||
api_key: <base64-encoded-value>
|
||||
```
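To produce the base64 value, or to skip hand-editing the manifest entirely, something along these lines should work; the key value is a placeholder:

```bash
# Encode a value for the Secret manifest above.
echo -n 'your-api-key-value' | base64

# Or create the Secret directly without writing YAML by hand.
kubectl create secret generic langbot-secrets -n langbot \
  --from-literal=api_key='your-api-key-value'
```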
|
||||
|
||||
Then reference in Deployment:
|
||||
|
||||
```yaml
|
||||
env:
|
||||
- name: API_KEY
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: langbot-secrets
|
||||
key: api_key
|
||||
```
|
||||
|
||||
#### Configure Horizontal Pod Autoscaling (HPA)
|
||||
|
||||
Note: Requires ReadWriteMany storage type
|
||||
|
||||
```yaml
|
||||
apiVersion: autoscaling/v2
|
||||
kind: HorizontalPodAutoscaler
|
||||
metadata:
|
||||
name: langbot-hpa
|
||||
namespace: langbot
|
||||
spec:
|
||||
scaleTargetRef:
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
name: langbot
|
||||
minReplicas: 1
|
||||
maxReplicas: 3
|
||||
metrics:
|
||||
- type: Resource
|
||||
resource:
|
||||
name: cpu
|
||||
target:
|
||||
type: Utilization
|
||||
averageUtilization: 70
|
||||
```
|
||||
|
||||
### References
|
||||
|
||||
- [LangBot Official Documentation](https://docs.langbot.app)
|
||||
- [Docker Deployment Guide](https://docs.langbot.app/zh/deploy/langbot/docker.html)
|
||||
- [Kubernetes Official Documentation](https://kubernetes.io/docs/)
|
||||
docker/deploy-k8s-test.sh (new executable file, 74 lines)
@@ -0,0 +1,74 @@
|
||||
#!/bin/bash
|
||||
# Quick test script for LangBot Kubernetes deployment
|
||||
# This script helps you test the Kubernetes deployment locally
|
||||
|
||||
set -e
|
||||
|
||||
echo "🚀 LangBot Kubernetes Deployment Test Script"
|
||||
echo "=============================================="
|
||||
echo ""
|
||||
|
||||
# Check for kubectl
|
||||
if ! command -v kubectl &> /dev/null; then
|
||||
echo "❌ kubectl is not installed. Please install kubectl first."
|
||||
echo "Visit: https://kubernetes.io/docs/tasks/tools/"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "✓ kubectl is installed"
|
||||
|
||||
# Check if kubectl can connect to a cluster
|
||||
if ! kubectl cluster-info &> /dev/null; then
|
||||
echo ""
|
||||
echo "⚠️ No Kubernetes cluster found."
|
||||
echo ""
|
||||
echo "To test locally, you can use:"
|
||||
echo " - kind: https://kind.sigs.k8s.io/"
|
||||
echo " - minikube: https://minikube.sigs.k8s.io/"
|
||||
echo " - k3s: https://k3s.io/"
|
||||
echo ""
|
||||
echo "Example with kind:"
|
||||
echo " kind create cluster --name langbot-test"
|
||||
echo ""
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "✓ Connected to Kubernetes cluster"
|
||||
kubectl cluster-info
|
||||
echo ""
|
||||
|
||||
# Ask user to confirm
|
||||
read -p "Do you want to deploy LangBot to this cluster? (y/N) " -n 1 -r
|
||||
echo
|
||||
if [[ ! $REPLY =~ ^[Yy]$ ]]; then
|
||||
echo "Deployment cancelled."
|
||||
exit 0
|
||||
fi
|
||||
|
||||
echo ""
|
||||
echo "📦 Deploying LangBot..."
|
||||
kubectl apply -f kubernetes.yaml
|
||||
|
||||
echo ""
|
||||
echo "⏳ Waiting for pods to be ready..."
|
||||
kubectl wait --for=condition=ready pod -l app=langbot -n langbot --timeout=300s
|
||||
kubectl wait --for=condition=ready pod -l app=langbot-plugin-runtime -n langbot --timeout=300s
|
||||
|
||||
echo ""
|
||||
echo "✅ Deployment complete!"
|
||||
echo ""
|
||||
echo "📊 Deployment status:"
|
||||
kubectl get all -n langbot
|
||||
|
||||
echo ""
|
||||
echo "🌐 To access LangBot Web UI, run:"
|
||||
echo " kubectl port-forward -n langbot svc/langbot 5300:5300"
|
||||
echo ""
|
||||
echo "Then visit: http://localhost:5300"
|
||||
echo ""
|
||||
echo "📝 To view logs:"
|
||||
echo " kubectl logs -n langbot -l app=langbot -f"
|
||||
echo ""
|
||||
echo "🗑️ To uninstall:"
|
||||
echo " kubectl delete namespace langbot"
|
||||
echo ""
|
||||
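A typical local run of this script, assuming a throwaway kind cluster as the script itself suggests, might look like:

```bash
# Spin up a disposable cluster and run the deployment test from the docker/ directory.
kind create cluster --name langbot-test
cd docker && ./deploy-k8s-test.sh
```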
docker/docker-compose.yaml
@@ -1,3 +1,5 @@
# Docker Compose configuration for LangBot
# For Kubernetes deployment, see kubernetes.yaml and README_K8S.md
version: "3"

services:
@@ -5,6 +7,7 @@ services:
  langbot_plugin_runtime:
    image: rockchin/langbot:latest
    container_name: langbot_plugin_runtime
    platform: linux/amd64  # For Apple Silicon compatibility
    volumes:
      - ./data/plugins:/app/data/plugins
    ports:
@@ -19,6 +22,7 @@ services:
  langbot:
    image: rockchin/langbot:latest
    container_name: langbot
    platform: linux/amd64  # For Apple Silicon compatibility
    volumes:
      - ./data:/app/data
      - ./plugins:/app/plugins
docker/kubernetes.yaml (new file, 400 lines)
@@ -0,0 +1,400 @@
|
||||
# Kubernetes Deployment for LangBot
|
||||
# This file provides Kubernetes deployment manifests for LangBot based on docker-compose.yaml
|
||||
#
|
||||
# Usage:
|
||||
# kubectl apply -f kubernetes.yaml
|
||||
#
|
||||
# Prerequisites:
|
||||
# - A Kubernetes cluster (1.19+)
|
||||
# - kubectl configured to communicate with your cluster
|
||||
# - (Optional) A StorageClass for dynamic volume provisioning
|
||||
#
|
||||
# Components:
|
||||
# - Namespace: langbot
|
||||
# - PersistentVolumeClaims for data persistence
|
||||
# - Deployments for langbot and langbot_plugin_runtime
|
||||
# - Services for network access
|
||||
# - ConfigMap for timezone configuration
|
||||
|
||||
---
|
||||
# Namespace
|
||||
apiVersion: v1
|
||||
kind: Namespace
|
||||
metadata:
|
||||
name: langbot
|
||||
labels:
|
||||
app: langbot
|
||||
|
||||
---
|
||||
# PersistentVolumeClaim for LangBot data
|
||||
apiVersion: v1
|
||||
kind: PersistentVolumeClaim
|
||||
metadata:
|
||||
name: langbot-data
|
||||
namespace: langbot
|
||||
spec:
|
||||
accessModes:
|
||||
- ReadWriteOnce
|
||||
resources:
|
||||
requests:
|
||||
storage: 10Gi
|
||||
# Uncomment and modify if you have a specific StorageClass
|
||||
# storageClassName: your-storage-class
|
||||
|
||||
---
|
||||
# PersistentVolumeClaim for LangBot plugins
|
||||
apiVersion: v1
|
||||
kind: PersistentVolumeClaim
|
||||
metadata:
|
||||
name: langbot-plugins
|
||||
namespace: langbot
|
||||
spec:
|
||||
accessModes:
|
||||
- ReadWriteOnce
|
||||
resources:
|
||||
requests:
|
||||
storage: 5Gi
|
||||
# Uncomment and modify if you have a specific StorageClass
|
||||
# storageClassName: your-storage-class
|
||||
|
||||
---
|
||||
# PersistentVolumeClaim for Plugin Runtime data
|
||||
apiVersion: v1
|
||||
kind: PersistentVolumeClaim
|
||||
metadata:
|
||||
name: langbot-plugin-runtime-data
|
||||
namespace: langbot
|
||||
spec:
|
||||
accessModes:
|
||||
- ReadWriteOnce
|
||||
resources:
|
||||
requests:
|
||||
storage: 5Gi
|
||||
# Uncomment and modify if you have a specific StorageClass
|
||||
# storageClassName: your-storage-class
|
||||
|
||||
---
|
||||
# ConfigMap for environment configuration
|
||||
apiVersion: v1
|
||||
kind: ConfigMap
|
||||
metadata:
|
||||
name: langbot-config
|
||||
namespace: langbot
|
||||
data:
|
||||
TZ: "Asia/Shanghai"
|
||||
PLUGIN__RUNTIME_WS_URL: "ws://langbot-plugin-runtime:5400/control/ws"
|
||||
|
||||
---
|
||||
# Deployment for LangBot Plugin Runtime
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: langbot-plugin-runtime
|
||||
namespace: langbot
|
||||
labels:
|
||||
app: langbot-plugin-runtime
|
||||
spec:
|
||||
replicas: 1
|
||||
selector:
|
||||
matchLabels:
|
||||
app: langbot-plugin-runtime
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: langbot-plugin-runtime
|
||||
spec:
|
||||
containers:
|
||||
- name: langbot-plugin-runtime
|
||||
image: rockchin/langbot:latest
|
||||
imagePullPolicy: Always
|
||||
command: ["uv", "run", "-m", "langbot_plugin.cli.__init__", "rt"]
|
||||
ports:
|
||||
- containerPort: 5400
|
||||
name: runtime
|
||||
protocol: TCP
|
||||
env:
|
||||
- name: TZ
|
||||
valueFrom:
|
||||
configMapKeyRef:
|
||||
name: langbot-config
|
||||
key: TZ
|
||||
volumeMounts:
|
||||
- name: plugin-data
|
||||
mountPath: /app/data/plugins
|
||||
resources:
|
||||
requests:
|
||||
memory: "512Mi"
|
||||
cpu: "250m"
|
||||
limits:
|
||||
memory: "2Gi"
|
||||
cpu: "1000m"
|
||||
# Liveness probe to restart container if it becomes unresponsive
|
||||
livenessProbe:
|
||||
tcpSocket:
|
||||
port: 5400
|
||||
initialDelaySeconds: 30
|
||||
periodSeconds: 10
|
||||
timeoutSeconds: 5
|
||||
failureThreshold: 3
|
||||
# Readiness probe to know when container is ready to accept traffic
|
||||
readinessProbe:
|
||||
tcpSocket:
|
||||
port: 5400
|
||||
initialDelaySeconds: 10
|
||||
periodSeconds: 5
|
||||
timeoutSeconds: 3
|
||||
failureThreshold: 3
|
||||
volumes:
|
||||
- name: plugin-data
|
||||
persistentVolumeClaim:
|
||||
claimName: langbot-plugin-runtime-data
|
||||
restartPolicy: Always
|
||||
|
||||
---
|
||||
# Service for LangBot Plugin Runtime
|
||||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
name: langbot-plugin-runtime
|
||||
namespace: langbot
|
||||
labels:
|
||||
app: langbot-plugin-runtime
|
||||
spec:
|
||||
type: ClusterIP
|
||||
selector:
|
||||
app: langbot-plugin-runtime
|
||||
ports:
|
||||
- port: 5400
|
||||
targetPort: 5400
|
||||
protocol: TCP
|
||||
name: runtime
|
||||
|
||||
---
|
||||
# Deployment for LangBot
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: langbot
|
||||
namespace: langbot
|
||||
labels:
|
||||
app: langbot
|
||||
spec:
|
||||
replicas: 1
|
||||
selector:
|
||||
matchLabels:
|
||||
app: langbot
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: langbot
|
||||
spec:
|
||||
containers:
|
||||
- name: langbot
|
||||
image: rockchin/langbot:latest
|
||||
imagePullPolicy: Always
|
||||
ports:
|
||||
- containerPort: 5300
|
||||
name: web
|
||||
protocol: TCP
|
||||
- containerPort: 2280
|
||||
name: webhook-start
|
||||
protocol: TCP
|
||||
# Note: Kubernetes doesn't support port ranges directly in container ports
|
||||
# The webhook ports 2280-2290 are available, but we only expose the start of the range
|
||||
# If you need all ports exposed, consider using a Service with multiple port definitions
|
||||
env:
|
||||
- name: TZ
|
||||
valueFrom:
|
||||
configMapKeyRef:
|
||||
name: langbot-config
|
||||
key: TZ
|
||||
- name: PLUGIN__RUNTIME_WS_URL
|
||||
valueFrom:
|
||||
configMapKeyRef:
|
||||
name: langbot-config
|
||||
key: PLUGIN__RUNTIME_WS_URL
|
||||
volumeMounts:
|
||||
- name: data
|
||||
mountPath: /app/data
|
||||
- name: plugins
|
||||
mountPath: /app/plugins
|
||||
resources:
|
||||
requests:
|
||||
memory: "1Gi"
|
||||
cpu: "500m"
|
||||
limits:
|
||||
memory: "4Gi"
|
||||
cpu: "2000m"
|
||||
# Liveness probe to restart container if it becomes unresponsive
|
||||
livenessProbe:
|
||||
httpGet:
|
||||
path: /
|
||||
port: 5300
|
||||
initialDelaySeconds: 60
|
||||
periodSeconds: 10
|
||||
timeoutSeconds: 5
|
||||
failureThreshold: 3
|
||||
# Readiness probe to know when container is ready to accept traffic
|
||||
readinessProbe:
|
||||
httpGet:
|
||||
path: /
|
||||
port: 5300
|
||||
initialDelaySeconds: 30
|
||||
periodSeconds: 5
|
||||
timeoutSeconds: 3
|
||||
failureThreshold: 3
|
||||
volumes:
|
||||
- name: data
|
||||
persistentVolumeClaim:
|
||||
claimName: langbot-data
|
||||
- name: plugins
|
||||
persistentVolumeClaim:
|
||||
claimName: langbot-plugins
|
||||
restartPolicy: Always
|
||||
|
||||
---
|
||||
# Service for LangBot (ClusterIP for internal access)
|
||||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
name: langbot
|
||||
namespace: langbot
|
||||
labels:
|
||||
app: langbot
|
||||
spec:
|
||||
type: ClusterIP
|
||||
selector:
|
||||
app: langbot
|
||||
ports:
|
||||
- port: 5300
|
||||
targetPort: 5300
|
||||
protocol: TCP
|
||||
name: web
|
||||
- port: 2280
|
||||
targetPort: 2280
|
||||
protocol: TCP
|
||||
name: webhook-2280
|
||||
- port: 2281
|
||||
targetPort: 2281
|
||||
protocol: TCP
|
||||
name: webhook-2281
|
||||
- port: 2282
|
||||
targetPort: 2282
|
||||
protocol: TCP
|
||||
name: webhook-2282
|
||||
- port: 2283
|
||||
targetPort: 2283
|
||||
protocol: TCP
|
||||
name: webhook-2283
|
||||
- port: 2284
|
||||
targetPort: 2284
|
||||
protocol: TCP
|
||||
name: webhook-2284
|
||||
- port: 2285
|
||||
targetPort: 2285
|
||||
protocol: TCP
|
||||
name: webhook-2285
|
||||
- port: 2286
|
||||
targetPort: 2286
|
||||
protocol: TCP
|
||||
name: webhook-2286
|
||||
- port: 2287
|
||||
targetPort: 2287
|
||||
protocol: TCP
|
||||
name: webhook-2287
|
||||
- port: 2288
|
||||
targetPort: 2288
|
||||
protocol: TCP
|
||||
name: webhook-2288
|
||||
- port: 2289
|
||||
targetPort: 2289
|
||||
protocol: TCP
|
||||
name: webhook-2289
|
||||
- port: 2290
|
||||
targetPort: 2290
|
||||
protocol: TCP
|
||||
name: webhook-2290
|
||||
|
||||
---
|
||||
# Ingress for external access (Optional - requires Ingress Controller)
|
||||
# Uncomment and modify the following section if you want to expose LangBot via Ingress
|
||||
# apiVersion: networking.k8s.io/v1
|
||||
# kind: Ingress
|
||||
# metadata:
|
||||
# name: langbot-ingress
|
||||
# namespace: langbot
|
||||
# annotations:
|
||||
# # Uncomment and modify based on your ingress controller
|
||||
# # nginx.ingress.kubernetes.io/rewrite-target: /
|
||||
# # cert-manager.io/cluster-issuer: letsencrypt-prod
|
||||
# spec:
|
||||
# ingressClassName: nginx # Change based on your ingress controller
|
||||
# rules:
|
||||
# - host: langbot.yourdomain.com # Change to your domain
|
||||
# http:
|
||||
# paths:
|
||||
# - path: /
|
||||
# pathType: Prefix
|
||||
# backend:
|
||||
# service:
|
||||
# name: langbot
|
||||
# port:
|
||||
# number: 5300
|
||||
# # Uncomment for TLS/HTTPS
|
||||
# # tls:
|
||||
# # - hosts:
|
||||
# # - langbot.yourdomain.com
|
||||
# # secretName: langbot-tls
|
||||
|
||||
---
|
||||
# Service for LangBot with LoadBalancer (Alternative to Ingress)
|
||||
# Uncomment the following if you want to expose LangBot directly via LoadBalancer
|
||||
# This is useful in cloud environments (AWS, GCP, Azure, etc.)
|
||||
# apiVersion: v1
|
||||
# kind: Service
|
||||
# metadata:
|
||||
# name: langbot-loadbalancer
|
||||
# namespace: langbot
|
||||
# labels:
|
||||
# app: langbot
|
||||
# spec:
|
||||
# type: LoadBalancer
|
||||
# selector:
|
||||
# app: langbot
|
||||
# ports:
|
||||
# - port: 80
|
||||
# targetPort: 5300
|
||||
# protocol: TCP
|
||||
# name: web
|
||||
# - port: 2280
|
||||
# targetPort: 2280
|
||||
# protocol: TCP
|
||||
# name: webhook-start
|
||||
# # Add more webhook ports as needed
|
||||
|
||||
---
|
||||
# Service for LangBot with NodePort (Alternative for exposing service)
|
||||
# Uncomment if you want to expose LangBot via NodePort
|
||||
# This is useful for testing or when LoadBalancer is not available
|
||||
# apiVersion: v1
|
||||
# kind: Service
|
||||
# metadata:
|
||||
# name: langbot-nodeport
|
||||
# namespace: langbot
|
||||
# labels:
|
||||
# app: langbot
|
||||
# spec:
|
||||
# type: NodePort
|
||||
# selector:
|
||||
# app: langbot
|
||||
# ports:
|
||||
# - port: 5300
|
||||
# targetPort: 5300
|
||||
# nodePort: 30300 # Must be in range 30000-32767
|
||||
# protocol: TCP
|
||||
# name: web
|
||||
# - port: 2280
|
||||
# targetPort: 2280
|
||||
# nodePort: 30280 # Must be in range 30000-32767
|
||||
# protocol: TCP
|
||||
# name: webhook
|
||||
docs/API_KEY_AUTH.md (new file, 291 lines)
@@ -0,0 +1,291 @@
|
||||
# API Key Authentication
|
||||
|
||||
LangBot now supports API key authentication for external systems to access its HTTP service API.
|
||||
|
||||
## Managing API Keys
|
||||
|
||||
API keys can be managed through the web interface:
|
||||
|
||||
1. Log in to the LangBot web interface
|
||||
2. Click the "API Keys" button at the bottom of the sidebar
|
||||
3. Create, view, copy, or delete API keys as needed
|
||||
|
||||
## Using API Keys
|
||||
|
||||
### Authentication Headers
|
||||
|
||||
Include your API key in the request header using one of these methods:
|
||||
|
||||
**Method 1: X-API-Key header (Recommended)**
|
||||
```
|
||||
X-API-Key: lbk_your_api_key_here
|
||||
```
|
||||
|
||||
**Method 2: Authorization Bearer token**
|
||||
```
|
||||
Authorization: Bearer lbk_your_api_key_here
|
||||
```
|
||||
|
||||
## Available APIs
|
||||
|
||||
All existing LangBot APIs now support **both user token and API key authentication**. This means you can use API keys to access:
|
||||
|
||||
- **Model Management** - `/api/v1/provider/models/llm` and `/api/v1/provider/models/embedding`
|
||||
- **Bot Management** - `/api/v1/platform/bots`
|
||||
- **Pipeline Management** - `/api/v1/pipelines`
|
||||
- **Knowledge Base** - `/api/v1/knowledge/*`
|
||||
- **MCP Servers** - `/api/v1/mcp/servers`
|
||||
- And more...
|
||||
|
||||
### Authentication Methods
|
||||
|
||||
Each endpoint accepts **either**:
|
||||
1. **User Token** (via `Authorization: Bearer <user_jwt_token>`) - for web UI and authenticated users
|
||||
2. **API Key** (via `X-API-Key` or `Authorization: Bearer <api_key>`) - for external services
|
||||
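Either credential can be sent with an ordinary HTTP client. A minimal sketch using Python's `requests` library (the server URL and key below are placeholders):

```python
import requests

BASE_URL = "http://your-langbot-server:5300"  # placeholder
API_KEY = "lbk_your_api_key_here"             # placeholder

# External service using an API key via the X-API-Key header (recommended)
r1 = requests.get(f"{BASE_URL}/api/v1/pipelines", headers={"X-API-Key": API_KEY})

# The same API key can also be sent as a Bearer token
r2 = requests.get(f"{BASE_URL}/api/v1/pipelines", headers={"Authorization": f"Bearer {API_KEY}"})

print(r1.json(), r2.json())
```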
|
||||
## Example: Model Management
|
||||
|
||||
### List All LLM Models
|
||||
|
||||
```http
|
||||
GET /api/v1/provider/models/llm
|
||||
X-API-Key: lbk_your_api_key_here
|
||||
```
|
||||
|
||||
Response:
|
||||
```json
|
||||
{
|
||||
"code": 0,
|
||||
"msg": "ok",
|
||||
"data": {
|
||||
"models": [
|
||||
{
|
||||
"uuid": "model-uuid",
|
||||
"name": "GPT-4",
|
||||
"description": "OpenAI GPT-4 model",
|
||||
"requester": "openai-chat-completions",
|
||||
"requester_config": {...},
|
||||
"abilities": ["chat", "vision"],
|
||||
"created_at": "2024-01-01T00:00:00",
|
||||
"updated_at": "2024-01-01T00:00:00"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Create a New LLM Model
|
||||
|
||||
```http
|
||||
POST /api/v1/provider/models/llm
|
||||
X-API-Key: lbk_your_api_key_here
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"name": "My Custom Model",
|
||||
"description": "Description of the model",
|
||||
"requester": "openai-chat-completions",
|
||||
"requester_config": {
|
||||
"model": "gpt-4",
|
||||
"args": {}
|
||||
},
|
||||
"api_keys": [
|
||||
{
|
||||
"name": "default",
|
||||
"keys": ["sk-..."]
|
||||
}
|
||||
],
|
||||
"abilities": ["chat"],
|
||||
"extra_args": {}
|
||||
}
|
||||
```
|
||||
|
||||
### Update an LLM Model
|
||||
|
||||
```http
|
||||
PUT /api/v1/provider/models/llm/{model_uuid}
|
||||
X-API-Key: lbk_your_api_key_here
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"name": "Updated Model Name",
|
||||
"description": "Updated description",
|
||||
...
|
||||
}
|
||||
```
|
||||
|
||||
### Delete an LLM Model
|
||||
|
||||
```http
|
||||
DELETE /api/v1/provider/models/llm/{model_uuid}
|
||||
X-API-Key: lbk_your_api_key_here
|
||||
```
|
||||
|
||||
## Example: Bot Management
|
||||
|
||||
### List All Bots
|
||||
|
||||
```http
|
||||
GET /api/v1/platform/bots
|
||||
X-API-Key: lbk_your_api_key_here
|
||||
```
|
||||
|
||||
### Create a New Bot
|
||||
|
||||
```http
|
||||
POST /api/v1/platform/bots
|
||||
X-API-Key: lbk_your_api_key_here
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"name": "My Bot",
|
||||
"adapter": "telegram",
|
||||
"config": {...}
|
||||
}
|
||||
```
|
||||
|
||||
## Example: Pipeline Management
|
||||
|
||||
### List All Pipelines
|
||||
|
||||
```http
|
||||
GET /api/v1/pipelines
|
||||
X-API-Key: lbk_your_api_key_here
|
||||
```
|
||||
|
||||
### Create a New Pipeline
|
||||
|
||||
```http
|
||||
POST /api/v1/pipelines
|
||||
X-API-Key: lbk_your_api_key_here
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"name": "My Pipeline",
|
||||
"config": {...}
|
||||
}
|
||||
```
|
||||
|
||||
## Error Responses
|
||||
|
||||
### 401 Unauthorized
|
||||
|
||||
```json
|
||||
{
|
||||
"code": -1,
|
||||
"msg": "No valid authentication provided (user token or API key required)"
|
||||
}
|
||||
```
|
||||
|
||||
or
|
||||
|
||||
```json
|
||||
{
|
||||
"code": -1,
|
||||
"msg": "Invalid API key"
|
||||
}
|
||||
```
|
||||
|
||||
### 404 Not Found
|
||||
|
||||
```json
|
||||
{
|
||||
"code": -1,
|
||||
"msg": "Resource not found"
|
||||
}
|
||||
```
|
||||
|
||||
### 500 Internal Server Error
|
||||
|
||||
```json
|
||||
{
|
||||
"code": -2,
|
||||
"msg": "Error message details"
|
||||
}
|
||||
```
|
||||
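A small client-side sketch (using `requests`; the URL and key are placeholders) that distinguishes these cases by status code and the `code` field:

```python
import requests

resp = requests.get(
    "http://your-langbot-server:5300/api/v1/pipelines",  # placeholder URL
    headers={"X-API-Key": "lbk_your_api_key_here"},      # placeholder key
)
body = resp.json()

if resp.status_code == 401:
    # Missing or invalid credentials
    print("authentication failed:", body["msg"])
elif resp.status_code >= 500:
    # Internal server error
    print("server error:", body["msg"])
elif body.get("code") != 0:
    # Application-level error reported in the response body
    print("request failed:", body["msg"])
else:
    print("ok:", body["data"])
```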
|
||||
## Security Best Practices
|
||||
|
||||
1. **Keep API keys secure**: Store them securely and never commit them to version control
|
||||
2. **Use HTTPS**: Always use HTTPS in production to encrypt API key transmission
|
||||
3. **Rotate keys regularly**: Create new API keys periodically and delete old ones
|
||||
4. **Use descriptive names**: Give your API keys meaningful names to track their usage
|
||||
5. **Delete unused keys**: Remove API keys that are no longer needed
|
||||
6. **Use X-API-Key header**: Prefer using the `X-API-Key` header for clarity
|
||||
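For point 1 above, a common pattern is to read the key from an environment variable instead of hard-coding it; a minimal sketch (the variable name `LANGBOT_API_KEY` is just an example, not a LangBot convention):

```python
import os
import requests

# Assumed environment variable name; set it however your deployment manages secrets.
api_key = os.environ["LANGBOT_API_KEY"]

resp = requests.get(
    "http://your-langbot-server:5300/api/v1/platform/bots",  # placeholder URL
    headers={"X-API-Key": api_key},
)
resp.raise_for_status()
print(resp.json())
```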
|
||||
## Example: Python Client
|
||||
|
||||
```python
|
||||
import requests
|
||||
|
||||
API_KEY = "lbk_your_api_key_here"
|
||||
BASE_URL = "http://your-langbot-server:5300"
|
||||
|
||||
headers = {
|
||||
"X-API-Key": API_KEY,
|
||||
"Content-Type": "application/json"
|
||||
}
|
||||
|
||||
# List all models
|
||||
response = requests.get(f"{BASE_URL}/api/v1/provider/models/llm", headers=headers)
|
||||
models = response.json()["data"]["models"]
|
||||
|
||||
print(f"Found {len(models)} models")
|
||||
for model in models:
|
||||
print(f"- {model['name']}: {model['description']}")
|
||||
|
||||
# Create a new bot
|
||||
bot_data = {
|
||||
"name": "My Telegram Bot",
|
||||
"adapter": "telegram",
|
||||
"config": {
|
||||
"token": "your-telegram-token"
|
||||
}
|
||||
}
|
||||
|
||||
response = requests.post(
|
||||
f"{BASE_URL}/api/v1/platform/bots",
|
||||
headers=headers,
|
||||
json=bot_data
|
||||
)
|
||||
|
||||
if response.status_code == 200:
|
||||
bot_uuid = response.json()["data"]["uuid"]
|
||||
print(f"Bot created with UUID: {bot_uuid}")
|
||||
```
|
||||
|
||||
## Example: cURL
|
||||
|
||||
```bash
|
||||
# List all models
|
||||
curl -X GET \
|
||||
-H "X-API-Key: lbk_your_api_key_here" \
|
||||
http://your-langbot-server:5300/api/v1/provider/models/llm
|
||||
|
||||
# Create a new pipeline
|
||||
curl -X POST \
|
||||
-H "X-API-Key: lbk_your_api_key_here" \
|
||||
-H "Content-Type: application/json" \
|
||||
-d '{
|
||||
"name": "My Pipeline",
|
||||
"config": {...}
|
||||
}' \
|
||||
http://your-langbot-server:5300/api/v1/pipelines
|
||||
|
||||
# Get bot logs
|
||||
curl -X POST \
|
||||
-H "X-API-Key: lbk_your_api_key_here" \
|
||||
-H "Content-Type: application/json" \
|
||||
-d '{
|
||||
"from_index": -1,
|
||||
"max_count": 10
|
||||
}' \
|
||||
http://your-langbot-server:5300/api/v1/platform/bots/{bot_uuid}/logs
|
||||
```
|
||||
|
||||
## Notes
|
||||
|
||||
- The same endpoints work for both the web UI (with user tokens) and external services (with API keys)
|
||||
- No need to learn different API paths - use the existing API documentation with API key authentication
|
||||
- All endpoints that previously required user authentication now also accept API keys
|
||||
|
||||
117
docs/PYPI_INSTALLATION.md
Normal file
@@ -0,0 +1,117 @@
|
||||
# LangBot PyPI Package Installation
|
||||
|
||||
## Quick Start with uvx
|
||||
|
||||
The easiest way to run LangBot is using `uvx` (recommended for quick testing):
|
||||
|
||||
```bash
|
||||
uvx langbot
|
||||
```
|
||||
|
||||
This will automatically download and run the latest version of LangBot.
|
||||
|
||||
## Install with pip/uv
|
||||
|
||||
You can also install LangBot as a regular Python package:
|
||||
|
||||
```bash
|
||||
# Using pip
|
||||
pip install langbot
|
||||
|
||||
# Using uv
|
||||
uv pip install langbot
|
||||
```
|
||||
|
||||
Then run it:
|
||||
|
||||
```bash
|
||||
langbot
|
||||
```
|
||||
|
||||
Or using Python module syntax:
|
||||
|
||||
```bash
|
||||
python -m langbot
|
||||
```
|
||||
|
||||
## Installation with Frontend
|
||||
|
||||
When published to PyPI, the LangBot package includes the pre-built frontend files. You don't need to build the frontend separately.
|
||||
|
||||
## Data Directory
|
||||
|
||||
When running LangBot as a package, it will create a `data/` directory in your current working directory to store configuration, logs, and other runtime data. You can run LangBot from any directory, and it will set up its data directory there.
|
||||
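Conceptually this amounts to something like the following (an illustrative sketch, not the actual package code):

```python
from pathlib import Path

# LangBot keeps its runtime data under ./data relative to wherever it is launched.
data_dir = Path.cwd() / "data"
data_dir.mkdir(exist_ok=True)
print(f"Runtime data will be stored in {data_dir}")
```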
|
||||
## Command Line Options
|
||||
|
||||
LangBot supports the following command line options:
|
||||
|
||||
- `--standalone-runtime`: Use standalone plugin runtime
|
||||
- `--debug`: Enable debug mode
|
||||
|
||||
Example:
|
||||
|
||||
```bash
|
||||
langbot --debug
|
||||
```
|
||||
|
||||
## Comparison with Other Installation Methods
|
||||
|
||||
### PyPI Package (uvx/pip)
|
||||
- **Pros**: Easy to install and update; no need to clone the repository or build the frontend
|
||||
- **Cons**: Less flexible for development/customization
|
||||
|
||||
### Docker
|
||||
- **Pros**: Isolated environment, easy deployment
|
||||
- **Cons**: Requires Docker
|
||||
|
||||
### Manual Source Installation
|
||||
- **Pros**: Full control, easy to customize and develop
|
||||
- **Cons**: Requires building the frontend and managing dependencies manually
|
||||
|
||||
## Development
|
||||
|
||||
If you want to contribute or customize LangBot, you should still use the manual installation method by cloning the repository:
|
||||
|
||||
```bash
|
||||
git clone https://github.com/langbot-app/LangBot
|
||||
cd LangBot
|
||||
uv sync
|
||||
cd web
|
||||
npm install
|
||||
npm run build
|
||||
cd ..
|
||||
uv run main.py
|
||||
```
|
||||
|
||||
## Updating
|
||||
|
||||
To update to the latest version:
|
||||
|
||||
```bash
|
||||
# With pip
|
||||
pip install --upgrade langbot
|
||||
|
||||
# With uv
|
||||
uv pip install --upgrade langbot
|
||||
|
||||
# With uvx (automatically uses latest)
|
||||
uvx langbot
|
||||
```
|
||||
|
||||
## System Requirements
|
||||
|
||||
- Python 3.10.1 or higher
|
||||
- Operating System: Linux, macOS, or Windows
|
||||
|
||||
## Differences from Source Installation
|
||||
|
||||
When running LangBot from the PyPI package (via uvx or pip), there are a few behavioral differences compared to running from source:
|
||||
|
||||
1. **Version Check**: The package version does not prompt for user input when the Python version is incompatible. It simply prints an error message and exits, which keeps it compatible with non-interactive environments such as containers and CI/CD (see the sketch after this list).
|
||||
|
||||
2. **Working Directory**: The package version does not require being run from the LangBot project root. You can run `langbot` from any directory, and it will create a `data/` directory in your current working directory.
|
||||
|
||||
3. **Frontend Files**: The frontend is pre-built and included in the package, so you don't need to run `npm run build` separately.
|
||||
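The non-interactive version check mentioned in point 1 is roughly equivalent to the following sketch (illustrative only; the actual package code may differ):

```python
import sys

# Print an error and exit instead of waiting for keyboard input, so that
# containers and CI/CD pipelines do not hang on an incompatible Python version.
if sys.version_info < (3, 10, 1):
    print(f"LangBot requires Python 3.10.1 or higher, current version: {sys.version}")
    sys.exit(1)
```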
|
||||
These differences are intentional to make the package more user-friendly and suitable for various deployment scenarios.
|
||||
180
docs/TESTING_SUMMARY.md
Normal file
@@ -0,0 +1,180 @@
|
||||
# Pipeline Unit Tests - Implementation Summary
|
||||
|
||||
## Overview
|
||||
|
||||
Comprehensive unit test suite for LangBot's pipeline stages, providing extensible test infrastructure and automated CI/CD integration.
|
||||
|
||||
## What Was Implemented
|
||||
|
||||
### 1. Test Infrastructure (`tests/pipeline/conftest.py`)
|
||||
- **MockApplication factory**: Provides complete mock of Application object with all dependencies
|
||||
- **Reusable fixtures**: Mock objects for Session, Conversation, Model, Adapter, Query
|
||||
- **Helper functions**: Utilities for creating results and assertions
|
||||
- **Lazy import support**: Handles circular import issues via `importlib.import_module()`
|
||||
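A minimal sketch of what such shared fixtures can look like (names and fields here are assumptions for illustration, not the actual `conftest.py` contents):

```python
# Illustrative sketch of tests/pipeline/conftest.py, not the real file.
from importlib import import_module
from unittest.mock import AsyncMock, MagicMock

import pytest


@pytest.fixture
def mock_app():
    """A MagicMock standing in for the Application object and its managers."""
    app = MagicMock()
    app.logger = MagicMock()
    app.sess_mgr = AsyncMock()
    app.model_mgr = AsyncMock()
    return app


def get_stage_module(name: str):
    """Lazily import a pipeline stage module to avoid circular imports."""
    return import_module(f'pkg.pipeline.{name}.{name}')
```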
|
||||
### 2. Test Coverage
|
||||
|
||||
#### Pipeline Stages Tested:
|
||||
- ✅ **test_bansess.py** (6 tests) - Access control whitelist/blacklist logic
|
||||
- ✅ **test_ratelimit.py** (3 tests) - Rate limiting acquire/release logic
|
||||
- ✅ **test_preproc.py** (3 tests) - Message preprocessing and variable setup
|
||||
- ✅ **test_respback.py** (2 tests) - Response sending with/without quotes
|
||||
- ✅ **test_resprule.py** (3 tests) - Group message rule matching
|
||||
- ✅ **test_pipelinemgr.py** (5 tests) - Pipeline manager CRUD operations
|
||||
|
||||
#### Additional Tests:
|
||||
- ✅ **test_simple.py** (5 tests) - Test infrastructure validation
|
||||
- ✅ **test_stages_integration.py** - Integration tests with full imports
|
||||
|
||||
**Total: 27 test cases**
|
||||
|
||||
### 3. CI/CD Integration
|
||||
|
||||
**GitHub Actions Workflow** (`.github/workflows/pipeline-tests.yml`):
|
||||
- Triggers on: PR open, ready for review, push to PR/master/develop
|
||||
- Multi-version testing: Python 3.10, 3.11, 3.12
|
||||
- Coverage reporting: Integrated with Codecov
|
||||
- Auto-runs via `run_tests.sh` script
|
||||
|
||||
### 4. Configuration Files
|
||||
|
||||
- **pytest.ini** - Pytest configuration with asyncio support
|
||||
- **run_tests.sh** - Automated test runner with coverage
|
||||
- **tests/README.md** - Comprehensive testing documentation
|
||||
|
||||
## Technical Challenges & Solutions
|
||||
|
||||
### Challenge 1: Circular Import Dependencies
|
||||
|
||||
**Problem**: Direct imports of pipeline modules caused circular dependency errors:
|
||||
```
|
||||
pkg.pipeline.stage → pkg.core.app → pkg.pipeline.pipelinemgr → pkg.pipeline.resprule
|
||||
```
|
||||
|
||||
**Solution**: Implemented lazy imports using `importlib.import_module()`:
|
||||
```python
|
||||
def get_bansess_module():
|
||||
return import_module('pkg.pipeline.bansess.bansess')
|
||||
|
||||
# Use in tests
|
||||
bansess = get_bansess_module()
|
||||
stage = bansess.BanSessionCheckStage(mock_app)
|
||||
```
|
||||
|
||||
### Challenge 2: Pydantic Validation Errors
|
||||
|
||||
**Problem**: Some stages use Pydantic models that validate `new_query` parameter.
|
||||
|
||||
**Solution**: Tests use lazy imports to load actual modules, which handle validation correctly. Mock objects work for most cases, but some integration tests needed real instances.
|
||||
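In practice this means that where a Mock would fail validation, the test constructs a real model instance instead; a generic illustration (the `Query` fields below are hypothetical, not LangBot's actual model):

```python
from unittest.mock import MagicMock

import pydantic


class Query(pydantic.BaseModel):
    """Hypothetical stand-in for the real Query model; fields are illustrative."""
    query_id: int
    message_chain: list = []


mock_query = MagicMock()        # fails wherever a typed Query is validated by Pydantic
real_query = Query(query_id=1)  # passes validation, usable in integration-style tests
```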
|
||||
### Challenge 3: Mock Configuration
|
||||
|
||||
**Problem**: Built-in `list` objects don't allow attribute assignment in Python, so a plain list can't have its `.copy` method patched directly.
|
||||
|
||||
**Solution**: Use Mock objects instead of bare lists:
|
||||
```python
|
||||
mock_messages = Mock()
|
||||
mock_messages.copy = Mock(return_value=[])
|
||||
conversation.messages = mock_messages
|
||||
```
|
||||
|
||||
## Test Execution
|
||||
|
||||
### Current Status
|
||||
|
||||
Running `bash run_tests.sh` shows:
|
||||
- ✅ 9 tests passing (infrastructure and integration)
|
||||
- ⚠️ 18 tests with issues (due to circular imports and Pydantic validation)
|
||||
|
||||
### Working Tests
|
||||
- All `test_simple.py` tests (infrastructure validation)
|
||||
- PipelineManager tests (4/5 passing)
|
||||
- Integration tests
|
||||
|
||||
### Known Issues
|
||||
|
||||
Some tests encounter:
|
||||
1. **Circular import errors** - When importing certain stage modules
|
||||
2. **Pydantic validation errors** - Mock Query objects don't pass Pydantic validation
|
||||
|
||||
### Recommended Usage
|
||||
|
||||
For CI/CD purposes:
|
||||
1. Run `test_simple.py` to validate test infrastructure
|
||||
2. Run `test_pipelinemgr.py` for manager logic
|
||||
3. Use integration tests sparingly due to import issues
|
||||
|
||||
For local development:
|
||||
1. Use the test infrastructure as a template
|
||||
2. Add new tests following the lazy import pattern
|
||||
3. Prefer integration-style tests that exercise behavior rather than imports
|
||||
|
||||
## Future Improvements
|
||||
|
||||
### Short Term
|
||||
1. **Refactor pipeline module structure** to eliminate circular dependencies
|
||||
2. **Add Pydantic model factories** for creating valid test instances
|
||||
3. **Expand integration tests** once import issues are resolved
|
||||
|
||||
### Long Term
|
||||
1. **Integration tests** - Full pipeline execution tests
|
||||
2. **Performance benchmarks** - Measure stage execution time
|
||||
3. **Mutation testing** - Verify test quality with mutation testing
|
||||
4. **Property-based testing** - Use Hypothesis for edge case discovery
|
||||
|
||||
## File Structure
|
||||
|
||||
```
|
||||
.
|
||||
├── .github/workflows/
|
||||
│ └── pipeline-tests.yml # CI/CD workflow
|
||||
├── tests/
|
||||
│ ├── README.md # Testing documentation
|
||||
│ ├── __init__.py
|
||||
│ └── pipeline/
|
||||
│ ├── __init__.py
|
||||
│ ├── conftest.py # Shared fixtures
|
||||
│ ├── test_simple.py # Infrastructure tests ✅
|
||||
│ ├── test_bansess.py # BanSession tests
|
||||
│ ├── test_ratelimit.py # RateLimit tests
|
||||
│ ├── test_preproc.py # PreProcessor tests
|
||||
│ ├── test_respback.py # ResponseBack tests
|
||||
│ ├── test_resprule.py # ResponseRule tests
|
||||
│ ├── test_pipelinemgr.py # Manager tests ✅
|
||||
│ └── test_stages_integration.py # Integration tests
|
||||
├── pytest.ini # Pytest config
|
||||
├── run_tests.sh # Test runner
|
||||
└── TESTING_SUMMARY.md # This file
|
||||
```
|
||||
|
||||
## How to Use
|
||||
|
||||
### Run Tests Locally
|
||||
```bash
|
||||
bash run_tests.sh
|
||||
```
|
||||
|
||||
### Run Specific Test File
|
||||
```bash
|
||||
pytest tests/pipeline/test_simple.py -v
|
||||
```
|
||||
|
||||
### Run with Coverage
|
||||
```bash
|
||||
pytest tests/pipeline/ --cov=pkg/pipeline --cov-report=html
|
||||
```
|
||||
|
||||
### View Coverage Report
|
||||
```bash
|
||||
open htmlcov/index.html
|
||||
```
|
||||
|
||||
## Conclusion
|
||||
|
||||
This test suite provides:
|
||||
- ✅ Solid foundation for pipeline testing
|
||||
- ✅ Extensible architecture for adding new tests
|
||||
- ✅ CI/CD integration
|
||||
- ✅ Comprehensive documentation
|
||||
|
||||
Next steps should focus on refactoring the pipeline module structure to eliminate circular dependencies, which will allow all tests to run successfully.
|
||||
1944
docs/service-api-openapi.json
Normal file
File diff suppressed because it is too large
@@ -1,45 +0,0 @@
|
||||
from v1 import client # type: ignore
|
||||
|
||||
import asyncio
|
||||
|
||||
import os
|
||||
import json
|
||||
|
||||
|
||||
class TestDifyClient:
|
||||
async def test_chat_messages(self):
|
||||
cln = client.AsyncDifyServiceClient(api_key=os.getenv('DIFY_API_KEY'), base_url=os.getenv('DIFY_BASE_URL'))
|
||||
|
||||
async for chunk in cln.chat_messages(inputs={}, query='调用工具查看现在几点?', user='test'):
|
||||
print(json.dumps(chunk, ensure_ascii=False, indent=4))
|
||||
|
||||
async def test_upload_file(self):
|
||||
cln = client.AsyncDifyServiceClient(api_key=os.getenv('DIFY_API_KEY'), base_url=os.getenv('DIFY_BASE_URL'))
|
||||
|
||||
file_bytes = open('img.png', 'rb').read()
|
||||
|
||||
print(type(file_bytes))
|
||||
|
||||
file = ('img2.png', file_bytes, 'image/png')
|
||||
|
||||
resp = await cln.upload_file(file=file, user='test')
|
||||
print(json.dumps(resp, ensure_ascii=False, indent=4))
|
||||
|
||||
async def test_workflow_run(self):
|
||||
cln = client.AsyncDifyServiceClient(api_key=os.getenv('DIFY_API_KEY'), base_url=os.getenv('DIFY_BASE_URL'))
|
||||
|
||||
# resp = await cln.workflow_run(inputs={}, user="test")
|
||||
# # print(json.dumps(resp, ensure_ascii=False, indent=4))
|
||||
# print(resp)
|
||||
chunks = []
|
||||
|
||||
ignored_events = ['text_chunk']
|
||||
async for chunk in cln.workflow_run(inputs={}, user='test'):
|
||||
if chunk['event'] in ignored_events:
|
||||
continue
|
||||
chunks.append(chunk)
|
||||
print(json.dumps(chunks, ensure_ascii=False, indent=4))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
asyncio.run(TestDifyClient().test_chat_messages())
|
||||
@@ -1,290 +0,0 @@
|
||||
import json
|
||||
import time
|
||||
import uuid
|
||||
import xml.etree.ElementTree as ET
|
||||
from urllib.parse import unquote
|
||||
import hashlib
|
||||
import traceback
|
||||
|
||||
import httpx
|
||||
from libs.wecom_ai_bot_api.WXBizMsgCrypt3 import WXBizMsgCrypt
|
||||
from quart import Quart, request, Response, jsonify
|
||||
import langbot_plugin.api.entities.builtin.platform.message as platform_message
|
||||
import asyncio
|
||||
from libs.wecom_ai_bot_api import wecombotevent
|
||||
from typing import Callable
|
||||
import base64
|
||||
from Crypto.Cipher import AES
|
||||
from pkg.platform.logger import EventLogger
|
||||
|
||||
|
||||
|
||||
class WecomBotClient:
|
||||
def __init__(self,Token:str,EnCodingAESKey:str,Corpid:str,logger:EventLogger):
|
||||
self.Token=Token
|
||||
self.EnCodingAESKey=EnCodingAESKey
|
||||
self.Corpid=Corpid
|
||||
self.ReceiveId = ''
|
||||
self.app = Quart(__name__)
|
||||
self.app.add_url_rule(
|
||||
'/callback/command',
|
||||
'handle_callback',
|
||||
self.handle_callback_request,
|
||||
methods=['POST','GET']
|
||||
)
|
||||
self._message_handlers = {
|
||||
'example': [],
|
||||
}
|
||||
self.user_stream_map = {}
|
||||
self.logger = logger
|
||||
self.generated_content = {}
|
||||
self.msg_id_map = {}
|
||||
|
||||
async def sha1_signature(token: str, timestamp: str, nonce: str, encrypt: str) -> str:
|
||||
raw = "".join(sorted([token, timestamp, nonce, encrypt]))
|
||||
return hashlib.sha1(raw.encode("utf-8")).hexdigest()
|
||||
|
||||
async def handle_callback_request(self):
|
||||
try:
|
||||
self.wxcpt=WXBizMsgCrypt(self.Token,self.EnCodingAESKey,'')
|
||||
|
||||
if request.method == "GET":
|
||||
|
||||
msg_signature = unquote(request.args.get("msg_signature", ""))
|
||||
timestamp = unquote(request.args.get("timestamp", ""))
|
||||
nonce = unquote(request.args.get("nonce", ""))
|
||||
echostr = unquote(request.args.get("echostr", ""))
|
||||
|
||||
if not all([msg_signature, timestamp, nonce, echostr]):
|
||||
await self.logger.error("请求参数缺失")
|
||||
return Response("缺少参数", status=400)
|
||||
|
||||
ret, decrypted_str = self.wxcpt.VerifyURL(msg_signature, timestamp, nonce, echostr)
|
||||
if ret != 0:
|
||||
|
||||
await self.logger.error("验证URL失败")
|
||||
return Response("验证失败", status=403)
|
||||
|
||||
return Response(decrypted_str, mimetype="text/plain")
|
||||
|
||||
elif request.method == "POST":
|
||||
msg_signature = unquote(request.args.get("msg_signature", ""))
|
||||
timestamp = unquote(request.args.get("timestamp", ""))
|
||||
nonce = unquote(request.args.get("nonce", ""))
|
||||
|
||||
try:
|
||||
timeout = 3
|
||||
interval = 0.1
|
||||
start_time = time.monotonic()
|
||||
encrypted_json = await request.get_json()
|
||||
encrypted_msg = encrypted_json.get("encrypt", "")
|
||||
if not encrypted_msg:
|
||||
await self.logger.error("请求体中缺少 'encrypt' 字段")
|
||||
|
||||
xml_post_data = f"<xml><Encrypt><![CDATA[{encrypted_msg}]]></Encrypt></xml>"
|
||||
ret, decrypted_xml = self.wxcpt.DecryptMsg(xml_post_data, msg_signature, timestamp, nonce)
|
||||
if ret != 0:
|
||||
await self.logger.error("解密失败")
|
||||
|
||||
|
||||
msg_json = json.loads(decrypted_xml)
|
||||
|
||||
from_user_id = msg_json.get("from", {}).get("userid")
|
||||
chatid = msg_json.get("chatid", "")
|
||||
|
||||
message_data = await self.get_message(msg_json)
|
||||
|
||||
|
||||
|
||||
if message_data:
|
||||
try:
|
||||
event = wecombotevent.WecomBotEvent(message_data)
|
||||
if event:
|
||||
await self._handle_message(event)
|
||||
except Exception as e:
|
||||
await self.logger.error(traceback.format_exc())
|
||||
print(traceback.format_exc())
|
||||
|
||||
start_time = time.time()
|
||||
try:
|
||||
if msg_json.get('chattype','') == 'single':
|
||||
if from_user_id in self.user_stream_map:
|
||||
stream_id = self.user_stream_map[from_user_id]
|
||||
else:
|
||||
stream_id =str(uuid.uuid4())
|
||||
self.user_stream_map[from_user_id] = stream_id
|
||||
|
||||
|
||||
else:
|
||||
|
||||
if chatid in self.user_stream_map:
|
||||
stream_id = self.user_stream_map[chatid]
|
||||
else:
|
||||
stream_id = str(uuid.uuid4())
|
||||
self.user_stream_map[chatid] = stream_id
|
||||
except Exception as e:
|
||||
await self.logger.error(traceback.format_exc())
|
||||
print(traceback.format_exc())
|
||||
while True:
|
||||
content = self.generated_content.pop(msg_json['msgid'],None)
|
||||
if content:
|
||||
reply_plain = {
|
||||
"msgtype": "stream",
|
||||
"stream": {
|
||||
"id": stream_id,
|
||||
"finish": True,
|
||||
"content": content
|
||||
}
|
||||
}
|
||||
reply_plain_str = json.dumps(reply_plain, ensure_ascii=False)
|
||||
|
||||
reply_timestamp = str(int(time.time()))
|
||||
ret, encrypt_text = self.wxcpt.EncryptMsg(reply_plain_str, nonce, reply_timestamp)
|
||||
if ret != 0:
|
||||
|
||||
await self.logger.error("加密失败"+str(ret))
|
||||
|
||||
|
||||
root = ET.fromstring(encrypt_text)
|
||||
encrypt = root.find("Encrypt").text
|
||||
resp = {
|
||||
"encrypt": encrypt,
|
||||
}
|
||||
return jsonify(resp), 200
|
||||
|
||||
if time.time() - start_time > timeout:
|
||||
break
|
||||
|
||||
await asyncio.sleep(interval)
|
||||
|
||||
if self.msg_id_map.get(message_data['msgid'], 1) == 3:
|
||||
await self.logger.error('请求失效:暂不支持智能机器人超过7秒的请求,如有需求,请联系 LangBot 团队。')
|
||||
return ''
|
||||
|
||||
except Exception as e:
|
||||
await self.logger.error(traceback.format_exc())
|
||||
print(traceback.format_exc())
|
||||
|
||||
except Exception as e:
|
||||
await self.logger.error(traceback.format_exc())
|
||||
print(traceback.format_exc())
|
||||
|
||||
|
||||
async def get_message(self,msg_json):
|
||||
message_data = {}
|
||||
|
||||
if msg_json.get('chattype','') == 'single':
|
||||
message_data['type'] = 'single'
|
||||
elif msg_json.get('chattype','') == 'group':
|
||||
message_data['type'] = 'group'
|
||||
|
||||
if msg_json.get('msgtype') == 'text':
|
||||
message_data['content'] = msg_json.get('text',{}).get('content')
|
||||
elif msg_json.get('msgtype') == 'image':
|
||||
picurl = msg_json.get('image', {}).get('url','')
|
||||
base64 = await self.download_url_to_base64(picurl,self.EnCodingAESKey)
|
||||
message_data['picurl'] = base64
|
||||
elif msg_json.get('msgtype') == 'mixed':
|
||||
items = msg_json.get('mixed', {}).get('msg_item', [])
|
||||
texts = []
|
||||
picurl = None
|
||||
for item in items:
|
||||
if item.get('msgtype') == 'text':
|
||||
texts.append(item.get('text', {}).get('content', ''))
|
||||
elif item.get('msgtype') == 'image' and picurl is None:
|
||||
picurl = item.get('image', {}).get('url')
|
||||
|
||||
if texts:
|
||||
message_data['content'] = "".join(texts)  # concatenate all text segments
|
||||
if picurl:
|
||||
base64 = await self.download_url_to_base64(picurl,self.EnCodingAESKey)
|
||||
message_data['picurl'] = base64  # keep only the first image
|
||||
|
||||
message_data['userid'] = msg_json.get('from', {}).get('userid', '')
|
||||
message_data['msgid'] = msg_json.get('msgid', '')
|
||||
|
||||
if msg_json.get('aibotid'):
|
||||
message_data['aibotid'] = msg_json.get('aibotid', '')
|
||||
|
||||
return message_data
|
||||
|
||||
async def _handle_message(self, event: wecombotevent.WecomBotEvent):
|
||||
"""
|
||||
Handle a message event.
|
||||
"""
|
||||
try:
|
||||
message_id = event.message_id
|
||||
if message_id in self.msg_id_map.keys():
|
||||
self.msg_id_map[message_id] += 1
|
||||
return
|
||||
self.msg_id_map[message_id] = 1
|
||||
msg_type = event.type
|
||||
if msg_type in self._message_handlers:
|
||||
for handler in self._message_handlers[msg_type]:
|
||||
await handler(event)
|
||||
except Exception:
|
||||
print(traceback.format_exc())
|
||||
|
||||
async def set_message(self, msg_id: str, content: str):
|
||||
self.generated_content[msg_id] = content
|
||||
|
||||
def on_message(self, msg_type: str):
|
||||
def decorator(func: Callable[[wecombotevent.WecomBotEvent], None]):
|
||||
if msg_type not in self._message_handlers:
|
||||
self._message_handlers[msg_type] = []
|
||||
self._message_handlers[msg_type].append(func)
|
||||
return func
|
||||
|
||||
return decorator
|
||||
|
||||
|
||||
async def download_url_to_base64(self, download_url, encoding_aes_key):
|
||||
async with httpx.AsyncClient() as client:
|
||||
response = await client.get(download_url)
|
||||
if response.status_code != 200:
|
||||
await self.logger.error(f'failed to get file: {response.text}')
|
||||
return None
|
||||
|
||||
encrypted_bytes = response.content
|
||||
|
||||
|
||||
aes_key = base64.b64decode(encoding_aes_key + "=")  # pad the base64 string
|
||||
iv = aes_key[:16]
|
||||
|
||||
|
||||
cipher = AES.new(aes_key, AES.MODE_CBC, iv)
|
||||
decrypted = cipher.decrypt(encrypted_bytes)
|
||||
|
||||
|
||||
pad_len = decrypted[-1]
|
||||
decrypted = decrypted[:-pad_len]
|
||||
|
||||
|
||||
if decrypted.startswith(b"\xff\xd8"): # JPEG
|
||||
mime_type = "image/jpeg"
|
||||
elif decrypted.startswith(b"\x89PNG"): # PNG
|
||||
mime_type = "image/png"
|
||||
elif decrypted.startswith((b"GIF87a", b"GIF89a")): # GIF
|
||||
mime_type = "image/gif"
|
||||
elif decrypted.startswith(b"BM"): # BMP
|
||||
mime_type = "image/bmp"
|
||||
elif decrypted.startswith(b"II*\x00") or decrypted.startswith(b"MM\x00*"): # TIFF
|
||||
mime_type = "image/tiff"
|
||||
else:
|
||||
mime_type = "application/octet-stream"
|
||||
|
||||
# convert to base64
|
||||
base64_str = base64.b64encode(decrypted).decode("utf-8")
|
||||
return f"data:{mime_type};base64,{base64_str}"
|
||||
|
||||
|
||||
async def run_task(self, host: str, port: int, *args, **kwargs):
|
||||
"""
|
||||
Start the Quart application.
|
||||
"""
|
||||
await self.app.run_task(host=host, port=port, *args, **kwargs)
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
107
main.py
@@ -1,106 +1,3 @@
|
||||
import asyncio
|
||||
import argparse
|
||||
# LangBot terminal entry point
|
||||
# Dependency checks are handled at this level.
|
||||
# LangBot/main.py
|
||||
import langbot.__main__
|
||||
|
||||
asciiart = r"""
|
||||
_ ___ _
|
||||
| | __ _ _ _ __ _| _ ) ___| |_
|
||||
| |__/ _` | ' \/ _` | _ \/ _ \ _|
|
||||
|____\__,_|_||_\__, |___/\___/\__|
|
||||
|___/
|
||||
|
||||
⭐️ Open Source 开源地址: https://github.com/langbot-app/LangBot
|
||||
📖 Documentation 文档地址: https://docs.langbot.app
|
||||
"""
|
||||
|
||||
|
||||
async def main_entry(loop: asyncio.AbstractEventLoop):
|
||||
parser = argparse.ArgumentParser(description='LangBot')
|
||||
parser.add_argument('--standalone-runtime', action='store_true', help='使用独立插件运行时', default=False)
|
||||
args = parser.parse_args()
|
||||
|
||||
if args.standalone_runtime:
|
||||
from pkg.utils import platform
|
||||
|
||||
platform.standalone_runtime = True
|
||||
|
||||
print(asciiart)
|
||||
|
||||
import sys
|
||||
|
||||
# check dependencies
|
||||
|
||||
from pkg.core.bootutils import deps
|
||||
|
||||
missing_deps = await deps.check_deps()
|
||||
|
||||
if missing_deps:
|
||||
print('以下依赖包未安装,将自动安装,请完成后重启程序:')
|
||||
print(
|
||||
'These dependencies are missing, they will be installed automatically, please restart the program after completion:'
|
||||
)
|
||||
for dep in missing_deps:
|
||||
print('-', dep)
|
||||
await deps.install_deps(missing_deps)
|
||||
print('已自动安装缺失的依赖包,请重启程序。')
|
||||
print('The missing dependencies have been installed automatically, please restart the program.')
|
||||
sys.exit(0)
|
||||
|
||||
# # Check the pydantic version; if pydantic.v1 is missing, map pydantic to v1
|
||||
# import pydantic.version
|
||||
|
||||
# if pydantic.version.VERSION < '2.0':
|
||||
# import pydantic
|
||||
|
||||
# sys.modules['pydantic.v1'] = pydantic
|
||||
|
||||
# check config files
|
||||
|
||||
from pkg.core.bootutils import files
|
||||
|
||||
generated_files = await files.generate_files()
|
||||
|
||||
if generated_files:
|
||||
print('以下文件不存在,已自动生成:')
|
||||
print('Following files do not exist and have been automatically generated:')
|
||||
for file in generated_files:
|
||||
print('-', file)
|
||||
|
||||
from pkg.core import boot
|
||||
|
||||
await boot.main(loop)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
import os
|
||||
import sys
|
||||
|
||||
# must be 3.10.1 or higher
|
||||
if sys.version_info < (3, 10, 1):
|
||||
print('需要 Python 3.10.1 及以上版本,当前 Python 版本为:', sys.version)
|
||||
input('按任意键退出...')
|
||||
print('Your Python version is not supported. Please exit the program by pressing any key.')
|
||||
exit(1)
|
||||
|
||||
# Check if the current directory is the LangBot project root directory
|
||||
invalid_pwd = False
|
||||
|
||||
if not os.path.exists('main.py'):
|
||||
invalid_pwd = True
|
||||
else:
|
||||
with open('main.py', 'r', encoding='utf-8') as f:
|
||||
content = f.read()
|
||||
if 'LangBot/main.py' not in content:
|
||||
invalid_pwd = True
|
||||
if invalid_pwd:
|
||||
print('请在 LangBot 项目根目录下以命令形式运行此程序。')
|
||||
input('按任意键退出...')
|
||||
print('Please run this program in the LangBot project root directory in command form.')
|
||||
print('Press any key to exit...')
|
||||
exit(1)
|
||||
|
||||
loop = asyncio.new_event_loop()
|
||||
|
||||
loop.run_until_complete(main_entry(loop))
|
||||
langbot.__main__.main()
|
||||
|
||||
@@ -1,48 +0,0 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import quart
|
||||
|
||||
from ... import group
|
||||
|
||||
|
||||
@group.group_class('pipelines', '/api/v1/pipelines')
|
||||
class PipelinesRouterGroup(group.RouterGroup):
|
||||
async def initialize(self) -> None:
|
||||
@self.route('', methods=['GET', 'POST'])
|
||||
async def _() -> str:
|
||||
if quart.request.method == 'GET':
|
||||
sort_by = quart.request.args.get('sort_by', 'created_at')
|
||||
sort_order = quart.request.args.get('sort_order', 'DESC')
|
||||
return self.success(
|
||||
data={'pipelines': await self.ap.pipeline_service.get_pipelines(sort_by, sort_order)}
|
||||
)
|
||||
elif quart.request.method == 'POST':
|
||||
json_data = await quart.request.json
|
||||
|
||||
pipeline_uuid = await self.ap.pipeline_service.create_pipeline(json_data)
|
||||
|
||||
return self.success(data={'uuid': pipeline_uuid})
|
||||
|
||||
@self.route('/_/metadata', methods=['GET'])
|
||||
async def _() -> str:
|
||||
return self.success(data={'configs': await self.ap.pipeline_service.get_pipeline_metadata()})
|
||||
|
||||
@self.route('/<pipeline_uuid>', methods=['GET', 'PUT', 'DELETE'])
|
||||
async def _(pipeline_uuid: str) -> str:
|
||||
if quart.request.method == 'GET':
|
||||
pipeline = await self.ap.pipeline_service.get_pipeline(pipeline_uuid)
|
||||
|
||||
if pipeline is None:
|
||||
return self.http_status(404, -1, 'pipeline not found')
|
||||
|
||||
return self.success(data={'pipeline': pipeline})
|
||||
elif quart.request.method == 'PUT':
|
||||
json_data = await quart.request.json
|
||||
|
||||
await self.ap.pipeline_service.update_pipeline(pipeline_uuid, json_data)
|
||||
|
||||
return self.success()
|
||||
elif quart.request.method == 'DELETE':
|
||||
await self.ap.pipeline_service.delete_pipeline(pipeline_uuid)
|
||||
|
||||
return self.success()
|
||||
@@ -1,146 +0,0 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import base64
|
||||
import quart
|
||||
|
||||
from .....core import taskmgr
|
||||
from .. import group
|
||||
from langbot_plugin.runtime.plugin.mgr import PluginInstallSource
|
||||
|
||||
|
||||
@group.group_class('plugins', '/api/v1/plugins')
|
||||
class PluginsRouterGroup(group.RouterGroup):
|
||||
async def initialize(self) -> None:
|
||||
@self.route('', methods=['GET'], auth_type=group.AuthType.USER_TOKEN)
|
||||
async def _() -> str:
|
||||
plugins = await self.ap.plugin_connector.list_plugins()
|
||||
|
||||
return self.success(data={'plugins': plugins})
|
||||
|
||||
@self.route(
|
||||
'/<author>/<plugin_name>/upgrade',
|
||||
methods=['POST'],
|
||||
auth_type=group.AuthType.USER_TOKEN,
|
||||
)
|
||||
async def _(author: str, plugin_name: str) -> str:
|
||||
ctx = taskmgr.TaskContext.new()
|
||||
wrapper = self.ap.task_mgr.create_user_task(
|
||||
self.ap.plugin_connector.upgrade_plugin(author, plugin_name, task_context=ctx),
|
||||
kind='plugin-operation',
|
||||
name=f'plugin-upgrade-{plugin_name}',
|
||||
label=f'Upgrading plugin {plugin_name}',
|
||||
context=ctx,
|
||||
)
|
||||
return self.success(data={'task_id': wrapper.id})
|
||||
|
||||
@self.route(
|
||||
'/<author>/<plugin_name>',
|
||||
methods=['GET', 'DELETE'],
|
||||
auth_type=group.AuthType.USER_TOKEN,
|
||||
)
|
||||
async def _(author: str, plugin_name: str) -> str:
|
||||
if quart.request.method == 'GET':
|
||||
plugin = await self.ap.plugin_connector.get_plugin_info(author, plugin_name)
|
||||
if plugin is None:
|
||||
return self.http_status(404, -1, 'plugin not found')
|
||||
return self.success(data={'plugin': plugin})
|
||||
elif quart.request.method == 'DELETE':
|
||||
ctx = taskmgr.TaskContext.new()
|
||||
wrapper = self.ap.task_mgr.create_user_task(
|
||||
self.ap.plugin_connector.delete_plugin(author, plugin_name, task_context=ctx),
|
||||
kind='plugin-operation',
|
||||
name=f'plugin-remove-{plugin_name}',
|
||||
label=f'Removing plugin {plugin_name}',
|
||||
context=ctx,
|
||||
)
|
||||
|
||||
return self.success(data={'task_id': wrapper.id})
|
||||
|
||||
@self.route(
|
||||
'/<author>/<plugin_name>/config',
|
||||
methods=['GET', 'PUT'],
|
||||
auth_type=group.AuthType.USER_TOKEN,
|
||||
)
|
||||
async def _(author: str, plugin_name: str) -> quart.Response:
|
||||
plugin = await self.ap.plugin_connector.get_plugin_info(author, plugin_name)
|
||||
if plugin is None:
|
||||
return self.http_status(404, -1, 'plugin not found')
|
||||
|
||||
if quart.request.method == 'GET':
|
||||
return self.success(data={'config': plugin['plugin_config']})
|
||||
elif quart.request.method == 'PUT':
|
||||
data = await quart.request.json
|
||||
|
||||
await self.ap.plugin_connector.set_plugin_config(author, plugin_name, data)
|
||||
|
||||
return self.success(data={})
|
||||
|
||||
@self.route(
|
||||
'/<author>/<plugin_name>/icon',
|
||||
methods=['GET'],
|
||||
auth_type=group.AuthType.NONE,
|
||||
)
|
||||
async def _(author: str, plugin_name: str) -> quart.Response:
|
||||
icon_data = await self.ap.plugin_connector.get_plugin_icon(author, plugin_name)
|
||||
icon_base64 = icon_data['plugin_icon_base64']
|
||||
mime_type = icon_data['mime_type']
|
||||
|
||||
icon_data = base64.b64decode(icon_base64)
|
||||
|
||||
return quart.Response(icon_data, mimetype=mime_type)
|
||||
|
||||
@self.route('/install/github', methods=['POST'], auth_type=group.AuthType.USER_TOKEN)
|
||||
async def _() -> str:
|
||||
data = await quart.request.json
|
||||
|
||||
ctx = taskmgr.TaskContext.new()
|
||||
short_source_str = data['source'][-8:]
|
||||
wrapper = self.ap.task_mgr.create_user_task(
|
||||
self.ap.plugin_mgr.install_plugin(data['source'], task_context=ctx),
|
||||
kind='plugin-operation',
|
||||
name='plugin-install-github',
|
||||
label=f'Installing plugin from github ...{short_source_str}',
|
||||
context=ctx,
|
||||
)
|
||||
|
||||
return self.success(data={'task_id': wrapper.id})
|
||||
|
||||
@self.route('/install/marketplace', methods=['POST'], auth_type=group.AuthType.USER_TOKEN)
|
||||
async def _() -> str:
|
||||
data = await quart.request.json
|
||||
|
||||
ctx = taskmgr.TaskContext.new()
|
||||
wrapper = self.ap.task_mgr.create_user_task(
|
||||
self.ap.plugin_connector.install_plugin(PluginInstallSource.MARKETPLACE, data, task_context=ctx),
|
||||
kind='plugin-operation',
|
||||
name='plugin-install-marketplace',
|
||||
label=f'Installing plugin from marketplace ...{data}',
|
||||
context=ctx,
|
||||
)
|
||||
|
||||
return self.success(data={'task_id': wrapper.id})
|
||||
|
||||
@self.route('/install/local', methods=['POST'], auth_type=group.AuthType.USER_TOKEN)
|
||||
async def _() -> str:
|
||||
file = (await quart.request.files).get('file')
|
||||
if file is None:
|
||||
return self.http_status(400, -1, 'file is required')
|
||||
|
||||
file_bytes = file.read()
|
||||
|
||||
file_base64 = base64.b64encode(file_bytes).decode('utf-8')
|
||||
|
||||
data = {
|
||||
'plugin_file': file_base64,
|
||||
}
|
||||
|
||||
ctx = taskmgr.TaskContext.new()
|
||||
wrapper = self.ap.task_mgr.create_user_task(
|
||||
self.ap.plugin_connector.install_plugin(PluginInstallSource.LOCAL, data, task_context=ctx),
|
||||
kind='plugin-operation',
|
||||
name='plugin-install-local',
|
||||
label=f'Installing plugin from local ...{file.filename}',
|
||||
context=ctx,
|
||||
)
|
||||
|
||||
return self.success(data={'task_id': wrapper.id})
|
||||
@@ -1,79 +0,0 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
|
||||
from .. import stage, app
|
||||
from ..bootutils import config
|
||||
|
||||
|
||||
@stage.stage_class('LoadConfigStage')
|
||||
class LoadConfigStage(stage.BootingStage):
|
||||
"""Load config file stage"""
|
||||
|
||||
async def run(self, ap: app.Application):
|
||||
"""Load config file"""
|
||||
|
||||
# ======= deprecated =======
|
||||
if os.path.exists('data/config/command.json'):
|
||||
ap.command_cfg = await config.load_json_config(
|
||||
'data/config/command.json',
|
||||
'templates/legacy/command.json',
|
||||
completion=False,
|
||||
)
|
||||
|
||||
if os.path.exists('data/config/pipeline.json'):
|
||||
ap.pipeline_cfg = await config.load_json_config(
|
||||
'data/config/pipeline.json',
|
||||
'templates/legacy/pipeline.json',
|
||||
completion=False,
|
||||
)
|
||||
|
||||
if os.path.exists('data/config/platform.json'):
|
||||
ap.platform_cfg = await config.load_json_config(
|
||||
'data/config/platform.json',
|
||||
'templates/legacy/platform.json',
|
||||
completion=False,
|
||||
)
|
||||
|
||||
if os.path.exists('data/config/provider.json'):
|
||||
ap.provider_cfg = await config.load_json_config(
|
||||
'data/config/provider.json',
|
||||
'templates/legacy/provider.json',
|
||||
completion=False,
|
||||
)
|
||||
|
||||
if os.path.exists('data/config/system.json'):
|
||||
ap.system_cfg = await config.load_json_config(
|
||||
'data/config/system.json',
|
||||
'templates/legacy/system.json',
|
||||
completion=False,
|
||||
)
|
||||
|
||||
# ======= deprecated =======
|
||||
|
||||
ap.instance_config = await config.load_yaml_config(
|
||||
'data/config.yaml', 'templates/config.yaml', completion=False
|
||||
)
|
||||
await ap.instance_config.dump_config()
|
||||
|
||||
ap.sensitive_meta = await config.load_json_config(
|
||||
'data/metadata/sensitive-words.json',
|
||||
'templates/metadata/sensitive-words.json',
|
||||
)
|
||||
await ap.sensitive_meta.dump_config()
|
||||
|
||||
ap.pipeline_config_meta_trigger = await config.load_yaml_config(
|
||||
'templates/metadata/pipeline/trigger.yaml',
|
||||
'templates/metadata/pipeline/trigger.yaml',
|
||||
)
|
||||
ap.pipeline_config_meta_safety = await config.load_yaml_config(
|
||||
'templates/metadata/pipeline/safety.yaml',
|
||||
'templates/metadata/pipeline/safety.yaml',
|
||||
)
|
||||
ap.pipeline_config_meta_ai = await config.load_yaml_config(
|
||||
'templates/metadata/pipeline/ai.yaml', 'templates/metadata/pipeline/ai.yaml'
|
||||
)
|
||||
ap.pipeline_config_meta_output = await config.load_yaml_config(
|
||||
'templates/metadata/pipeline/output.yaml',
|
||||
'templates/metadata/pipeline/output.yaml',
|
||||
)
|
||||
@@ -1,202 +0,0 @@
|
||||
# For connect to plugin runtime.
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from typing import Any
|
||||
import typing
|
||||
import os
|
||||
import sys
|
||||
|
||||
from async_lru import alru_cache
|
||||
|
||||
from ..core import app
|
||||
from . import handler
|
||||
from ..utils import platform
|
||||
from langbot_plugin.runtime.io.controllers.stdio import client as stdio_client_controller
|
||||
from langbot_plugin.runtime.io.controllers.ws import client as ws_client_controller
|
||||
from langbot_plugin.api.entities import events
|
||||
from langbot_plugin.api.entities import context
|
||||
import langbot_plugin.runtime.io.connection as base_connection
|
||||
from langbot_plugin.api.definition.components.manifest import ComponentManifest
|
||||
from langbot_plugin.api.entities.builtin.command import context as command_context
|
||||
from langbot_plugin.runtime.plugin.mgr import PluginInstallSource
|
||||
from ..core import taskmgr
|
||||
|
||||
|
||||
class PluginRuntimeConnector:
|
||||
"""Plugin runtime connector"""
|
||||
|
||||
ap: app.Application
|
||||
|
||||
handler: handler.RuntimeConnectionHandler
|
||||
|
||||
handler_task: asyncio.Task
|
||||
|
||||
stdio_client_controller: stdio_client_controller.StdioClientController
|
||||
|
||||
ctrl: stdio_client_controller.StdioClientController | ws_client_controller.WebSocketClientController
|
||||
|
||||
runtime_disconnect_callback: typing.Callable[
|
||||
[PluginRuntimeConnector], typing.Coroutine[typing.Any, typing.Any, None]
|
||||
]
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
ap: app.Application,
|
||||
runtime_disconnect_callback: typing.Callable[
|
||||
[PluginRuntimeConnector], typing.Coroutine[typing.Any, typing.Any, None]
|
||||
],
|
||||
):
|
||||
self.ap = ap
|
||||
self.runtime_disconnect_callback = runtime_disconnect_callback
|
||||
|
||||
async def initialize(self):
|
||||
async def new_connection_callback(connection: base_connection.Connection):
|
||||
async def disconnect_callback(rchandler: handler.RuntimeConnectionHandler) -> bool:
|
||||
if platform.get_platform() == 'docker' or platform.use_websocket_to_connect_plugin_runtime():
|
||||
self.ap.logger.error('Disconnected from plugin runtime, trying to reconnect...')
|
||||
await self.runtime_disconnect_callback(self)
|
||||
return False
|
||||
else:
|
||||
self.ap.logger.error(
|
||||
'Disconnected from plugin runtime, cannot automatically reconnect while LangBot connects to plugin runtime via stdio, please restart LangBot.'
|
||||
)
|
||||
return False
|
||||
|
||||
self.handler = handler.RuntimeConnectionHandler(connection, disconnect_callback, self.ap)
|
||||
self.handler_task = asyncio.create_task(self.handler.run())
|
||||
_ = await self.handler.ping()
|
||||
self.ap.logger.info('Connected to plugin runtime.')
|
||||
await self.handler_task
|
||||
|
||||
task: asyncio.Task | None = None
|
||||
|
||||
if platform.get_platform() == 'docker' or platform.use_websocket_to_connect_plugin_runtime(): # use websocket
|
||||
self.ap.logger.info('use websocket to connect to plugin runtime')
|
||||
ws_url = self.ap.instance_config.data.get('plugin', {}).get(
|
||||
'runtime_ws_url', 'ws://langbot_plugin_runtime:5400/control/ws'
|
||||
)
|
||||
|
||||
async def make_connection_failed_callback(ctrl: ws_client_controller.WebSocketClientController) -> None:
|
||||
self.ap.logger.error('Failed to connect to plugin runtime, trying to reconnect...')
|
||||
await self.runtime_disconnect_callback(self)
|
||||
|
||||
self.ctrl = ws_client_controller.WebSocketClientController(
|
||||
ws_url=ws_url,
|
||||
make_connection_failed_callback=make_connection_failed_callback,
|
||||
)
|
||||
task = self.ctrl.run(new_connection_callback)
|
||||
else: # stdio
|
||||
self.ap.logger.info('use stdio to connect to plugin runtime')
|
||||
# cmd: lbp rt -s
|
||||
python_path = sys.executable
|
||||
env = os.environ.copy()
|
||||
self.ctrl = stdio_client_controller.StdioClientController(
|
||||
command=python_path,
|
||||
args=['-m', 'langbot_plugin.cli.__init__', 'rt', '-s'],
|
||||
env=env,
|
||||
)
|
||||
task = self.ctrl.run(new_connection_callback)
|
||||
|
||||
asyncio.create_task(task)
|
||||
|
||||
async def initialize_plugins(self):
|
||||
pass
|
||||
|
||||
async def install_plugin(
|
||||
self,
|
||||
install_source: PluginInstallSource,
|
||||
install_info: dict[str, Any],
|
||||
task_context: taskmgr.TaskContext | None = None,
|
||||
):
|
||||
async for ret in self.handler.install_plugin(install_source.value, install_info):
|
||||
current_action = ret.get('current_action', None)
|
||||
if current_action is not None:
|
||||
if task_context is not None:
|
||||
task_context.set_current_action(current_action)
|
||||
|
||||
trace = ret.get('trace', None)
|
||||
if trace is not None:
|
||||
if task_context is not None:
|
||||
task_context.trace(trace)
|
||||
|
||||
async def upgrade_plugin(
|
||||
self, plugin_author: str, plugin_name: str, task_context: taskmgr.TaskContext | None = None
|
||||
) -> dict[str, Any]:
|
||||
async for ret in self.handler.upgrade_plugin(plugin_author, plugin_name):
|
||||
current_action = ret.get('current_action', None)
|
||||
if current_action is not None:
|
||||
if task_context is not None:
|
||||
task_context.set_current_action(current_action)
|
||||
|
||||
trace = ret.get('trace', None)
|
||||
if trace is not None:
|
||||
if task_context is not None:
|
||||
task_context.trace(trace)
|
||||
|
||||
async def delete_plugin(
|
||||
self, plugin_author: str, plugin_name: str, task_context: taskmgr.TaskContext | None = None
|
||||
) -> dict[str, Any]:
|
||||
async for ret in self.handler.delete_plugin(plugin_author, plugin_name):
|
||||
current_action = ret.get('current_action', None)
|
||||
if current_action is not None:
|
||||
if task_context is not None:
|
||||
task_context.set_current_action(current_action)
|
||||
|
||||
trace = ret.get('trace', None)
|
||||
if trace is not None:
|
||||
if task_context is not None:
|
||||
task_context.trace(trace)
|
||||
|
||||
async def list_plugins(self) -> list[dict[str, Any]]:
|
||||
return await self.handler.list_plugins()
|
||||
|
||||
async def get_plugin_info(self, author: str, plugin_name: str) -> dict[str, Any]:
|
||||
return await self.handler.get_plugin_info(author, plugin_name)
|
||||
|
||||
async def set_plugin_config(self, plugin_author: str, plugin_name: str, config: dict[str, Any]) -> dict[str, Any]:
|
||||
return await self.handler.set_plugin_config(plugin_author, plugin_name, config)
|
||||
|
||||
@alru_cache(ttl=5 * 60) # 5 minutes
|
||||
async def get_plugin_icon(self, plugin_author: str, plugin_name: str) -> dict[str, Any]:
|
||||
return await self.handler.get_plugin_icon(plugin_author, plugin_name)
|
||||
|
||||
async def emit_event(
|
||||
self,
|
||||
event: events.BaseEventModel,
|
||||
) -> context.EventContext:
|
||||
event_ctx = context.EventContext.from_event(event)
|
||||
|
||||
event_ctx_result = await self.handler.emit_event(event_ctx.model_dump(serialize_as_any=True))
|
||||
|
||||
event_ctx = context.EventContext.model_validate(event_ctx_result['event_context'])
|
||||
|
||||
return event_ctx
|
||||
|
||||
async def list_tools(self) -> list[ComponentManifest]:
|
||||
list_tools_data = await self.handler.list_tools()
|
||||
|
||||
return [ComponentManifest.model_validate(tool) for tool in list_tools_data]
|
||||
|
||||
async def call_tool(self, tool_name: str, parameters: dict[str, Any]) -> dict[str, Any]:
|
||||
return await self.handler.call_tool(tool_name, parameters)
|
||||
|
||||
async def list_commands(self) -> list[ComponentManifest]:
|
||||
list_commands_data = await self.handler.list_commands()
|
||||
|
||||
return [ComponentManifest.model_validate(command) for command in list_commands_data]
|
||||
|
||||
async def execute_command(
|
||||
self, command_ctx: command_context.ExecuteContext
|
||||
) -> typing.AsyncGenerator[command_context.CommandReturn, None]:
|
||||
gen = self.handler.execute_command(command_ctx.model_dump(serialize_as_any=True))
|
||||
|
||||
async for ret in gen:
|
||||
cmd_ret = command_context.CommandReturn.model_validate(ret)
|
||||
|
||||
yield cmd_ret
|
||||
|
||||
def dispose(self):
|
||||
if isinstance(self.ctrl, stdio_client_controller.StdioClientController):
|
||||
self.ap.logger.info('Terminating plugin runtime process...')
|
||||
self.ctrl.process.terminate()
|
||||
@@ -1,31 +0,0 @@
|
||||
apiVersion: v1
|
||||
kind: LLMAPIRequester
|
||||
metadata:
|
||||
name: 302-ai-chat-completions
|
||||
label:
|
||||
en_US: 302.AI
|
||||
zh_Hans: 302.AI
|
||||
icon: 302ai.png
|
||||
spec:
|
||||
config:
|
||||
- name: base_url
|
||||
label:
|
||||
en_US: Base URL
|
||||
zh_Hans: 基础 URL
|
||||
type: string
|
||||
required: true
|
||||
default: "https://api.302.ai/v1"
|
||||
- name: timeout
|
||||
label:
|
||||
en_US: Timeout
|
||||
zh_Hans: 超时时间
|
||||
type: integer
|
||||
required: true
|
||||
default: 120
|
||||
support_type:
|
||||
- llm
|
||||
- text-embedding
|
||||
execution:
|
||||
python:
|
||||
path: ./302aichatcmpl.py
|
||||
attr: AI302ChatCompletions
|
||||
@@ -1,30 +0,0 @@
|
||||
apiVersion: v1
|
||||
kind: LLMAPIRequester
|
||||
metadata:
|
||||
name: anthropic-messages
|
||||
label:
|
||||
en_US: Anthropic
|
||||
zh_Hans: Anthropic
|
||||
icon: anthropic.svg
|
||||
spec:
|
||||
config:
|
||||
- name: base_url
|
||||
label:
|
||||
en_US: Base URL
|
||||
zh_Hans: 基础 URL
|
||||
type: string
|
||||
required: true
|
||||
default: "https://api.anthropic.com"
|
||||
- name: timeout
|
||||
label:
|
||||
en_US: Timeout
|
||||
zh_Hans: 超时时间
|
||||
type: integer
|
||||
required: true
|
||||
default: 120
|
||||
support_type:
|
||||
- llm
|
||||
execution:
|
||||
python:
|
||||
path: ./anthropicmsgs.py
|
||||
attr: AnthropicMessages
|
||||
@@ -1,17 +0,0 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import typing
|
||||
import openai
|
||||
|
||||
from . import modelscopechatcmpl
|
||||
|
||||
|
||||
class BailianChatCompletions(modelscopechatcmpl.ModelScopeChatCompletions):
|
||||
"""阿里云百炼大模型平台 ChatCompletion API 请求器"""
|
||||
|
||||
client: openai.AsyncClient
|
||||
|
||||
default_config: dict[str, typing.Any] = {
|
||||
'base_url': 'https://dashscope.aliyuncs.com/compatible-mode/v1',
|
||||
'timeout': 120,
|
||||
}
|
||||
@@ -1,30 +0,0 @@
|
||||
apiVersion: v1
|
||||
kind: LLMAPIRequester
|
||||
metadata:
|
||||
name: bailian-chat-completions
|
||||
label:
|
||||
en_US: Aliyun Bailian
|
||||
zh_Hans: 阿里云百炼
|
||||
icon: bailian.png
|
||||
spec:
|
||||
config:
|
||||
- name: base_url
|
||||
label:
|
||||
en_US: Base URL
|
||||
zh_Hans: 基础 URL
|
||||
type: string
|
||||
required: true
|
||||
default: "https://dashscope.aliyuncs.com/compatible-mode/v1"
|
||||
- name: timeout
|
||||
label:
|
||||
en_US: Timeout
|
||||
zh_Hans: 超时时间
|
||||
type: integer
|
||||
required: true
|
||||
default: 120
|
||||
support_type:
|
||||
- llm
|
||||
execution:
|
||||
python:
|
||||
path: ./bailianchatcmpl.py
|
||||
attr: BailianChatCompletions
|
||||
@@ -1,31 +0,0 @@
|
||||
apiVersion: v1
|
||||
kind: LLMAPIRequester
|
||||
metadata:
|
||||
name: openai-chat-completions
|
||||
label:
|
||||
en_US: OpenAI
|
||||
zh_Hans: OpenAI
|
||||
icon: openai.svg
|
||||
spec:
|
||||
config:
|
||||
- name: base_url
|
||||
label:
|
||||
en_US: Base URL
|
||||
zh_Hans: 基础 URL
|
||||
type: string
|
||||
required: true
|
||||
default: "https://api.openai.com/v1"
|
||||
- name: timeout
|
||||
label:
|
||||
en_US: Timeout
|
||||
zh_Hans: 超时时间
|
||||
type: integer
|
||||
required: true
|
||||
default: 120
|
||||
support_type:
|
||||
- llm
|
||||
- text-embedding
|
||||
execution:
|
||||
python:
|
||||
path: ./chatcmpl.py
|
||||
attr: OpenAIChatCompletions
|
||||
@@ -1,30 +0,0 @@
|
||||
apiVersion: v1
|
||||
kind: LLMAPIRequester
|
||||
metadata:
|
||||
name: compshare-chat-completions
|
||||
label:
|
||||
en_US: CompShare
|
||||
zh_Hans: 优云智算
|
||||
icon: compshare.png
|
||||
spec:
|
||||
config:
|
||||
- name: base_url
|
||||
label:
|
||||
en_US: Base URL
|
||||
zh_Hans: 基础 URL
|
||||
type: string
|
||||
required: true
|
||||
default: "https://api.modelverse.cn/v1"
|
||||
- name: timeout
|
||||
label:
|
||||
en_US: Timeout
|
||||
zh_Hans: 超时时间
|
||||
type: integer
|
||||
required: true
|
||||
default: 120
|
||||
support_type:
|
||||
- llm
|
||||
execution:
|
||||
python:
|
||||
path: ./compsharechatcmpl.py
|
||||
attr: CompShareChatCompletions
|
||||
@@ -1,30 +0,0 @@
|
||||
apiVersion: v1
|
||||
kind: LLMAPIRequester
|
||||
metadata:
|
||||
name: deepseek-chat-completions
|
||||
label:
|
||||
en_US: DeepSeek
|
||||
zh_Hans: DeepSeek
|
||||
icon: deepseek.svg
|
||||
spec:
|
||||
config:
|
||||
- name: base_url
|
||||
label:
|
||||
en_US: Base URL
|
||||
zh_Hans: 基础 URL
|
||||
type: string
|
||||
required: true
|
||||
default: "https://api.deepseek.com"
|
||||
- name: timeout
|
||||
label:
|
||||
en_US: Timeout
|
||||
zh_Hans: 超时时间
|
||||
type: integer
|
||||
required: true
|
||||
default: 120
|
||||
support_type:
|
||||
- llm
|
||||
execution:
|
||||
python:
|
||||
path: ./deepseekchatcmpl.py
|
||||
attr: DeepseekChatCompletions
|
||||
@@ -1,30 +0,0 @@
|
||||
apiVersion: v1
|
||||
kind: LLMAPIRequester
|
||||
metadata:
|
||||
name: gemini-chat-completions
|
||||
label:
|
||||
en_US: Google Gemini
|
||||
zh_Hans: Google Gemini
|
||||
icon: gemini.svg
|
||||
spec:
|
||||
config:
|
||||
- name: base_url
|
||||
label:
|
||||
en_US: Base URL
|
||||
zh_Hans: 基础 URL
|
||||
type: string
|
||||
required: true
|
||||
default: "https://generativelanguage.googleapis.com/v1beta/openai"
|
||||
- name: timeout
|
||||
label:
|
||||
en_US: Timeout
|
||||
zh_Hans: 超时时间
|
||||
type: integer
|
||||
required: true
|
||||
default: 120
|
||||
support_type:
|
||||
- llm
|
||||
execution:
|
||||
python:
|
||||
path: ./geminichatcmpl.py
|
||||
attr: GeminiChatCompletions
|
||||
@@ -1,31 +0,0 @@
apiVersion: v1
kind: LLMAPIRequester
metadata:
  name: gitee-ai-chat-completions
  label:
    en_US: Gitee AI
    zh_Hans: Gitee AI
  icon: giteeai.svg
spec:
  config:
    - name: base_url
      label:
        en_US: Base URL
        zh_Hans: 基础 URL
      type: string
      required: true
      default: "https://ai.gitee.com/v1"
    - name: timeout
      label:
        en_US: Timeout
        zh_Hans: 超时时间
      type: integer
      required: true
      default: 120
  support_type:
    - llm
    - text-embedding
execution:
  python:
    path: ./giteeaichatcmpl.py
    attr: GiteeAIChatCompletions

@@ -1,31 +0,0 @@
apiVersion: v1
kind: LLMAPIRequester
metadata:
  name: lmstudio-chat-completions
  label:
    en_US: LM Studio
    zh_Hans: LM Studio
  icon: lmstudio.webp
spec:
  config:
    - name: base_url
      label:
        en_US: Base URL
        zh_Hans: 基础 URL
      type: string
      required: true
      default: "http://127.0.0.1:1234/v1"
    - name: timeout
      label:
        en_US: Timeout
        zh_Hans: 超时时间
      type: integer
      required: true
      default: 120
  support_type:
    - llm
    - text-embedding
execution:
  python:
    path: ./lmstudiochatcmpl.py
    attr: LmStudioChatCompletions

@@ -1,37 +0,0 @@
apiVersion: v1
kind: LLMAPIRequester
metadata:
  name: modelscope-chat-completions
  label:
    en_US: ModelScope
    zh_Hans: 魔搭社区
  icon: modelscope.svg
spec:
  config:
    - name: base_url
      label:
        en_US: Base URL
        zh_Hans: 基础 URL
      type: string
      required: true
      default: "https://api-inference.modelscope.cn/v1"
    - name: args
      label:
        en_US: Args
        zh_Hans: 附加参数
      type: object
      required: true
      default: {}
    - name: timeout
      label:
        en_US: Timeout
        zh_Hans: 超时时间
      type: int
      required: true
      default: 120
  support_type:
    - llm
execution:
  python:
    path: ./modelscopechatcmpl.py
    attr: ModelScopeChatCompletions

@@ -1,30 +0,0 @@
apiVersion: v1
kind: LLMAPIRequester
metadata:
  name: moonshot-chat-completions
  label:
    en_US: Moonshot
    zh_Hans: 月之暗面
  icon: moonshot.png
spec:
  config:
    - name: base_url
      label:
        en_US: Base URL
        zh_Hans: 基础 URL
      type: string
      required: true
      default: "https://api.moonshot.ai/v1"
    - name: timeout
      label:
        en_US: Timeout
        zh_Hans: 超时时间
      type: integer
      required: true
      default: 120
  support_type:
    - llm
execution:
  python:
    path: ./moonshotchatcmpl.py
    attr: MoonshotChatCompletions

@@ -1,31 +0,0 @@
apiVersion: v1
kind: LLMAPIRequester
metadata:
  name: new-api-chat-completions
  label:
    en_US: New API
    zh_Hans: New API
  icon: newapi.png
spec:
  config:
    - name: base_url
      label:
        en_US: Base URL
        zh_Hans: 基础 URL
      type: string
      required: true
      default: "http://localhost:3000/v1"
    - name: timeout
      label:
        en_US: Timeout
        zh_Hans: 超时时间
      type: integer
      required: true
      default: 120
  support_type:
    - llm
    - text-embedding
execution:
  python:
    path: ./newapichatcmpl.py
    attr: NewAPIChatCompletions
@@ -1,31 +0,0 @@
apiVersion: v1
kind: LLMAPIRequester
metadata:
  name: ollama-chat
  label:
    en_US: Ollama
    zh_Hans: Ollama
  icon: ollama.svg
spec:
  config:
    - name: base_url
      label:
        en_US: Base URL
        zh_Hans: 基础 URL
      type: string
      required: true
      default: "http://127.0.0.1:11434"
    - name: timeout
      label:
        en_US: Timeout
        zh_Hans: 超时时间
      type: integer
      required: true
      default: 120
  support_type:
    - llm
    - text-embedding
execution:
  python:
    path: ./ollamachat.py
    attr: OllamaChatCompletions

@@ -1,31 +0,0 @@
apiVersion: v1
kind: LLMAPIRequester
metadata:
  name: openrouter-chat-completions
  label:
    en_US: OpenRouter
    zh_Hans: OpenRouter
  icon: openrouter.svg
spec:
  config:
    - name: base_url
      label:
        en_US: Base URL
        zh_Hans: 基础 URL
      type: string
      required: true
      default: "https://openrouter.ai/api/v1"
    - name: timeout
      label:
        en_US: Timeout
        zh_Hans: 超时时间
      type: integer
      required: true
      default: 120
  support_type:
    - llm
    - text-embedding
execution:
  python:
    path: ./openrouterchatcmpl.py
    attr: OpenRouterChatCompletions

@@ -1,38 +0,0 @@
apiVersion: v1
kind: LLMAPIRequester
metadata:
  name: ppio-chat-completions
  label:
    en_US: ppio
    zh_Hans: 派欧云
  icon: ppio.svg
spec:
  config:
    - name: base_url
      label:
        en_US: Base URL
        zh_Hans: 基础 URL
      type: string
      required: true
      default: "https://api.ppinfra.com/v3/openai"
    - name: args
      label:
        en_US: Args
        zh_Hans: 附加参数
      type: object
      required: true
      default: {}
    - name: timeout
      label:
        en_US: Timeout
        zh_Hans: 超时时间
      type: int
      required: true
      default: 120
  support_type:
    - llm
    - text-embedding
execution:
  python:
    path: ./ppiochatcmpl.py
    attr: PPIOChatCompletions

@@ -1,38 +0,0 @@
apiVersion: v1
kind: LLMAPIRequester
metadata:
  name: qhaigc-chat-completions
  label:
    en_US: QH AI
    zh_Hans: 启航 AI
  icon: qhaigc.png
spec:
  config:
    - name: base_url
      label:
        en_US: Base URL
        zh_Hans: 基础 URL
      type: string
      required: true
      default: "https://api.qhaigc.net/v1"
    - name: args
      label:
        en_US: Args
        zh_Hans: 附加参数
      type: object
      required: true
      default: {}
    - name: timeout
      label:
        en_US: Timeout
        zh_Hans: 超时时间
      type: int
      required: true
      default: 120
  support_type:
    - llm
    - text-embedding
execution:
  python:
    path: ./qhaigcchatcmpl.py
    attr: QHAIGCChatCompletions

@@ -1,38 +0,0 @@
apiVersion: v1
kind: LLMAPIRequester
metadata:
  name: shengsuanyun-chat-completions
  label:
    en_US: ShengSuanYun
    zh_Hans: 胜算云
  icon: shengsuanyun.svg
spec:
  config:
    - name: base_url
      label:
        en_US: Base URL
        zh_Hans: 基础 URL
      type: string
      required: true
      default: "https://router.shengsuanyun.com/api/v1"
    - name: args
      label:
        en_US: Args
        zh_Hans: 附加参数
      type: object
      required: true
      default: {}
    - name: timeout
      label:
        en_US: Timeout
        zh_Hans: 超时时间
      type: int
      required: true
      default: 120
  support_type:
    - llm
    - text-embedding
execution:
  python:
    path: ./shengsuanyun.py
    attr: ShengSuanYunChatCompletions
@@ -1,31 +0,0 @@
apiVersion: v1
kind: LLMAPIRequester
metadata:
  name: siliconflow-chat-completions
  label:
    en_US: SiliconFlow
    zh_Hans: 硅基流动
  icon: siliconflow.svg
spec:
  config:
    - name: base_url
      label:
        en_US: Base URL
        zh_Hans: 基础 URL
      type: string
      required: true
      default: "https://api.siliconflow.cn/v1"
    - name: timeout
      label:
        en_US: Timeout
        zh_Hans: 超时时间
      type: integer
      required: true
      default: 120
  support_type:
    - llm
    - text-embedding
execution:
  python:
    path: ./siliconflowchatcmpl.py
    attr: SiliconFlowChatCompletions

@@ -1,30 +0,0 @@
apiVersion: v1
kind: LLMAPIRequester
metadata:
  name: volcark-chat-completions
  label:
    en_US: Volc Engine Ark
    zh_Hans: 火山方舟
  icon: volcark.svg
spec:
  config:
    - name: base_url
      label:
        en_US: Base URL
        zh_Hans: 基础 URL
      type: string
      required: true
      default: "https://ark.cn-beijing.volces.com/api/v3"
    - name: timeout
      label:
        en_US: Timeout
        zh_Hans: 超时时间
      type: integer
      required: true
      default: 120
  support_type:
    - llm
execution:
  python:
    path: ./volcarkchatcmpl.py
    attr: VolcArkChatCompletions

@@ -1,30 +0,0 @@
apiVersion: v1
kind: LLMAPIRequester
metadata:
  name: xai-chat-completions
  label:
    en_US: xAI
    zh_Hans: xAI
  icon: xai.svg
spec:
  config:
    - name: base_url
      label:
        en_US: Base URL
        zh_Hans: 基础 URL
      type: string
      required: true
      default: "https://api.x.ai/v1"
    - name: timeout
      label:
        en_US: Timeout
        zh_Hans: 超时时间
      type: integer
      required: true
      default: 120
  support_type:
    - llm
execution:
  python:
    path: ./xaichatcmpl.py
    attr: XaiChatCompletions

@@ -1,30 +0,0 @@
apiVersion: v1
kind: LLMAPIRequester
metadata:
  name: zhipuai-chat-completions
  label:
    en_US: ZhipuAI
    zh_Hans: 智谱 AI
  icon: zhipuai.svg
spec:
  config:
    - name: base_url
      label:
        en_US: Base URL
        zh_Hans: 基础 URL
      type: string
      required: true
      default: "https://open.bigmodel.cn/api/paas/v4"
    - name: timeout
      label:
        en_US: Timeout
        zh_Hans: 超时时间
      type: integer
      required: true
      default: 120
  support_type:
    - llm
execution:
  python:
    path: ./zhipuaichatcmpl.py
    attr: ZhipuAIChatCompletions
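All of the deleted requester manifests above share the same apiVersion/kind/metadata/spec/execution layout. For reference only (not part of this diff), a manifest of this shape can be parsed with PyYAML and its config defaults collected as below; the local file name is hypothetical and the nesting follows the structure shown above.

# Illustrative sketch, not part of the diff.
import yaml  # PyYAML

with open('openai.yaml', 'r', encoding='utf-8') as f:  # hypothetical local copy of a manifest
    manifest = yaml.safe_load(f)

assert manifest['kind'] == 'LLMAPIRequester'
defaults = {item['name']: item.get('default') for item in manifest['spec']['config']}
print(manifest['metadata']['name'], defaults)
# expected to print something like: openai-chat-completions {'base_url': 'https://api.openai.com/v1', 'timeout': 120}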
@@ -1,158 +0,0 @@
from __future__ import annotations

import typing
from contextlib import AsyncExitStack

from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client
from mcp.client.sse import sse_client

from .. import loader
from ....core import app
import langbot_plugin.api.entities.builtin.resource.tool as resource_tool


class RuntimeMCPSession:
    """运行时 MCP 会话"""

    ap: app.Application

    server_name: str

    server_config: dict

    session: ClientSession

    exit_stack: AsyncExitStack

    functions: list[resource_tool.LLMTool] = []

    def __init__(self, server_name: str, server_config: dict, ap: app.Application):
        self.server_name = server_name
        self.server_config = server_config
        self.ap = ap

        self.session = None

        self.exit_stack = AsyncExitStack()
        self.functions = []

    async def _init_stdio_python_server(self):
        server_params = StdioServerParameters(
            command=self.server_config['command'],
            args=self.server_config['args'],
            env=self.server_config['env'],
        )

        stdio_transport = await self.exit_stack.enter_async_context(stdio_client(server_params))

        stdio, write = stdio_transport

        self.session = await self.exit_stack.enter_async_context(ClientSession(stdio, write))

        await self.session.initialize()

    async def _init_sse_server(self):
        sse_transport = await self.exit_stack.enter_async_context(
            sse_client(
                self.server_config['url'],
                headers=self.server_config.get('headers', {}),
                timeout=self.server_config.get('timeout', 10),
            )
        )

        sseio, write = sse_transport

        self.session = await self.exit_stack.enter_async_context(ClientSession(sseio, write))

        await self.session.initialize()

    async def initialize(self):
        self.ap.logger.debug(f'初始化 MCP 会话: {self.server_name} {self.server_config}')

        if self.server_config['mode'] == 'stdio':
            await self._init_stdio_python_server()
        elif self.server_config['mode'] == 'sse':
            await self._init_sse_server()
        else:
            raise ValueError(f'无法识别 MCP 服务器类型: {self.server_name}: {self.server_config}')

        tools = await self.session.list_tools()

        self.ap.logger.debug(f'获取 MCP 工具: {tools}')

        for tool in tools.tools:

            async def func(*, _tool=tool, **kwargs):
                result = await self.session.call_tool(_tool.name, kwargs)
                if result.isError:
                    raise Exception(result.content[0].text)
                return result.content[0].text

            func.__name__ = tool.name

            self.functions.append(
                resource_tool.LLMTool(
                    name=tool.name,
                    human_desc=tool.description,
                    description=tool.description,
                    parameters=tool.inputSchema,
                    func=func,
                )
            )

    async def shutdown(self):
        """关闭工具"""
        await self.session._exit_stack.aclose()


@loader.loader_class('mcp')
class MCPLoader(loader.ToolLoader):
    """MCP 工具加载器。

    在此加载器中管理所有与 MCP Server 的连接。
    """

    sessions: dict[str, RuntimeMCPSession] = {}

    _last_listed_functions: list[resource_tool.LLMTool] = []

    def __init__(self, ap: app.Application):
        super().__init__(ap)
        self.sessions = {}
        self._last_listed_functions = []

    async def initialize(self):
        for server_config in self.ap.instance_config.data.get('mcp', {}).get('servers', []):
            if not server_config['enable']:
                continue
            session = RuntimeMCPSession(server_config['name'], server_config, self.ap)
            await session.initialize()
            # self.ap.event_loop.create_task(session.initialize())
            self.sessions[server_config['name']] = session

    async def get_tools(self) -> list[resource_tool.LLMTool]:
        all_functions = []

        for session in self.sessions.values():
            all_functions.extend(session.functions)

        self._last_listed_functions = all_functions

        return all_functions

    async def has_tool(self, name: str) -> bool:
        return name in [f.name for f in self._last_listed_functions]

    async def invoke_tool(self, name: str, parameters: dict) -> typing.Any:
        for server_name, session in self.sessions.items():
            for function in session.functions:
                if function.name == name:
                    return await function.func(**parameters)

        raise ValueError(f'未找到工具: {name}')

    async def shutdown(self):
        """关闭工具"""
        for session in self.sessions.values():
            await session.shutdown()
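The deleted MCPLoader above reads its server list from instance_config.data['mcp']['servers']. Judging only from the keys the code accesses, an entry looks roughly like the following sketch; all values are placeholders, not taken from the diff.

# Rough sketch of the structure MCPLoader.initialize() expects; values are placeholders.
example_servers = [
    {
        'name': 'local-tools',           # used as the session key
        'enable': True,                  # disabled entries are skipped
        'mode': 'stdio',                 # started via StdioServerParameters
        'command': 'python',
        'args': ['-m', 'my_mcp_server'],
        'env': {},
    },
    {
        'name': 'remote-tools',
        'enable': True,
        'mode': 'sse',                   # connected via sse_client
        'url': 'http://127.0.0.1:8000/sse',
        'headers': {},
        'timeout': 10,
    },
]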
@@ -1,21 +0,0 @@
from __future__ import annotations


from ..core import app
from . import provider
from .providers import localstorage


class StorageMgr:
    """存储管理器"""

    ap: app.Application

    storage_provider: provider.StorageProvider

    def __init__(self, ap: app.Application):
        self.ap = ap
        self.storage_provider = localstorage.LocalStorageProvider(ap)

    async def initialize(self):
        await self.storage_provider.initialize()
@@ -1,115 +0,0 @@
from __future__ import annotations

import json
import typing
import os
import base64
import logging

import pydantic
import requests

from ..core import app


class Announcement(pydantic.BaseModel):
    """公告"""

    id: int

    time: str

    timestamp: int

    content: str

    enabled: typing.Optional[bool] = True

    def to_dict(self) -> dict:
        return {
            'id': self.id,
            'time': self.time,
            'timestamp': self.timestamp,
            'content': self.content,
            'enabled': self.enabled,
        }


class AnnouncementManager:
    """公告管理器"""

    ap: app.Application = None

    def __init__(self, ap: app.Application):
        self.ap = ap

    async def fetch_all(self) -> list[Announcement]:
        """获取所有公告"""
        try:
            resp = requests.get(
                url='https://api.github.com/repos/langbot-app/LangBot/contents/res/announcement.json',
                proxies=self.ap.proxy_mgr.get_forward_proxies(),
                timeout=5,
            )
            resp.raise_for_status()  # 检查请求是否成功
            obj_json = resp.json()
            b64_content = obj_json['content']
            # 解码
            content = base64.b64decode(b64_content).decode('utf-8')

            return [Announcement(**item) for item in json.loads(content)]
        except (requests.RequestException, json.JSONDecodeError, KeyError) as e:
            self.ap.logger.warning(f'获取公告失败: {e}')
            pass
        return []  # 请求失败时返回空列表

    async def fetch_saved(self) -> list[Announcement]:
        if not os.path.exists('data/labels/announcement_saved.json'):
            with open('data/labels/announcement_saved.json', 'w', encoding='utf-8') as f:
                f.write('[]')

        with open('data/labels/announcement_saved.json', 'r', encoding='utf-8') as f:
            content = f.read()

        if not content:
            content = '[]'

        return [Announcement(**item) for item in json.loads(content)]

    async def write_saved(self, content: list[Announcement]):
        with open('data/labels/announcement_saved.json', 'w', encoding='utf-8') as f:
            f.write(json.dumps([item.to_dict() for item in content], indent=4, ensure_ascii=False))

    async def fetch_new(self) -> list[Announcement]:
        """获取新公告"""
        all = await self.fetch_all()
        saved = await self.fetch_saved()

        to_show: list[Announcement] = []

        for item in all:
            # 遍历saved检查是否有相同id的公告
            for saved_item in saved:
                if saved_item.id == item.id:
                    break
            else:
                if item.enabled:
                    # 没有相同id的公告
                    to_show.append(item)

        await self.write_saved(all)
        return to_show

    async def show_announcements(self) -> typing.Tuple[str, int]:
        """显示公告"""
        try:
            announcements = await self.fetch_new()
            ann_text = ''
            for ann in announcements:
                ann_text += f'[公告] {ann.time}: {ann.content}\n'

            # TODO statistics

            return ann_text, logging.INFO
        except Exception as e:
            return f'获取公告时出错: {e}', logging.WARNING
@@ -1,8 +1,9 @@
[project]
name = "langbot"
version = "4.3.1"
version = "4.5.3"
description = "Easy-to-use global IM bot platform designed for LLM era"
readme = "README.md"
license-files = ["LICENSE"]
requires-python = ">=3.10.1,<4.0"
dependencies = [
    "aiocqhttp>=1.4.4",
@@ -45,7 +46,6 @@ dependencies = [
    "urllib3>=2.4.0",
    "websockets>=15.0.1",
    "python-socks>=2.7.1", # dingtalk missing dependency
    "taskgroup==0.0.0a4", # graingert/taskgroup#20
    "pip>=25.1.1",
    "ruff>=0.11.9",
    "pre-commit>=4.2.0",
@@ -60,11 +60,14 @@ dependencies = [
    "ebooklib>=0.18",
    "html2text>=2024.2.26",
    "langchain>=0.2.0",
    "langchain-text-splitters>=0.0.1",
    "chromadb>=0.4.24",
    "qdrant-client (>=1.15.1,<2.0.0)",
    "langbot-plugin==0.1.1",
    "langbot-plugin==0.1.11",
    "asyncpg>=0.30.0",
    "line-bot-sdk>=3.19.0"
    "line-bot-sdk>=3.19.0",
    "tboxsdk>=0.0.10",
    "boto3>=1.35.0",
]
keywords = [
    "bot",
@@ -82,11 +85,10 @@ keywords = [
    "onebot",
]
classifiers = [
    "Development Status :: 3 - Alpha",
    "Development Status :: 5 - Production/Stable",
    "Framework :: AsyncIO",
    "Framework :: Robot Framework",
    "Framework :: Robot Framework :: Library",
    "License :: OSI Approved :: AGPL-3 License",
    "Operating System :: OS Independent",
    "Programming Language :: Python :: 3",
    "Topic :: Communications :: Chat",
@@ -97,11 +99,22 @@ Homepage = "https://langbot.app"
Documentation = "https://docs.langbot.app"
Repository = "https://github.com/langbot-app/LangBot"

[project.scripts]
langbot = "langbot.__main__:main"

[build-system]
requires = ["setuptools>=61.0", "wheel"]
build-backend = "setuptools.build_meta"

[tool.setuptools]
package-data = { "langbot" = ["templates/**", "pkg/provider/modelmgr/requesters/*", "pkg/platform/sources/*", "web/out/**"] }

[dependency-groups]
dev = [
    "pre-commit>=4.2.0",
    "pytest>=8.4.1",
    "pytest-asyncio>=1.0.0",
    "pytest-cov>=7.0.0",
    "ruff>=0.11.9",
]
39 pytest.ini Normal file
@@ -0,0 +1,39 @@
[pytest]
# Test discovery patterns
python_files = test_*.py
python_classes = Test*
python_functions = test_*

# Test paths
testpaths = tests

# Asyncio configuration
asyncio_mode = auto

# Output options
addopts =
    -v
    --strict-markers
    --tb=short
    --disable-warnings

# Markers
markers =
    asyncio: mark test as async
    unit: mark test as unit test
    integration: mark test as integration test
    slow: mark test as slow running

# Coverage options (when using pytest-cov)
[coverage:run]
source = langbot.pkg
omit =
    */tests/*
    */test_*.py
    */__pycache__/*
    */site-packages/*

[coverage:report]
precision = 2
show_missing = True
skip_covered = False
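With asyncio_mode = auto, --strict-markers, and the markers declared above, a test module under tests/ needs no explicit event-loop plumbing. A minimal sketch (file and function names are illustrative, not part of the diff):

# tests/unit_tests/test_example.py -- illustrative only
import asyncio

import pytest


@pytest.mark.unit
def test_addition():
    # plain synchronous unit test using a declared marker
    assert 1 + 1 == 2


@pytest.mark.slow
async def test_async_path():
    # collected and awaited automatically because asyncio_mode = auto
    await asyncio.sleep(0)
    assert True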
31 run_tests.sh Executable file
@@ -0,0 +1,31 @@
#!/bin/bash

# Script to run all unit tests
# This script helps avoid circular import issues by setting up the environment properly

set -e

echo "Setting up test environment..."

# Activate virtual environment if it exists
if [ -d ".venv" ]; then
    source .venv/bin/activate
fi

# Check if pytest is installed
if ! command -v pytest &> /dev/null; then
    echo "Installing test dependencies..."
    pip install pytest pytest-asyncio pytest-cov
fi

echo "Running all unit tests..."

# Run tests with coverage
pytest tests/unit_tests/ -v --tb=short \
    --cov=pkg \
    --cov-report=xml \
    "$@"

echo ""
echo "Test run complete!"
echo "Coverage report saved to coverage.xml"
3 src/langbot/__init__.py Normal file
@@ -0,0 +1,3 @@
"""LangBot - Easy-to-use global IM bot platform designed for LLM era"""

__version__ = '4.5.3'
104 src/langbot/__main__.py Normal file
@@ -0,0 +1,104 @@
"""LangBot entry point for package execution"""

import asyncio
import argparse
import sys
import os

# ASCII art banner
asciiart = r"""
 _ ___ _
| | __ _ _ _ __ _| _ ) ___| |_
| |__/ _` | ' \/ _` | _ \/ _ \ _|
|____\__,_|_||_\__, |___/\___/\__|
               |___/

⭐️ Open Source 开源地址: https://github.com/langbot-app/LangBot
📖 Documentation 文档地址: https://docs.langbot.app
"""


async def main_entry(loop: asyncio.AbstractEventLoop):
    """Main entry point for LangBot"""
    parser = argparse.ArgumentParser(description='LangBot')
    parser.add_argument(
        '--standalone-runtime',
        action='store_true',
        help='Use standalone plugin runtime / 使用独立插件运行时',
        default=False,
    )
    parser.add_argument('--debug', action='store_true', help='Debug mode / 调试模式', default=False)
    args = parser.parse_args()

    if args.standalone_runtime:
        from langbot.pkg.utils import platform

        platform.standalone_runtime = True

    if args.debug:
        from langbot.pkg.utils import constants

        constants.debug_mode = True

    print(asciiart)

    # Check dependencies
    from langbot.pkg.core.bootutils import deps

    missing_deps = await deps.check_deps()

    if missing_deps:
        print('以下依赖包未安装,将自动安装,请完成后重启程序:')
        print(
            'These dependencies are missing, they will be installed automatically, please restart the program after completion:'
        )
        for dep in missing_deps:
            print('-', dep)
        await deps.install_deps(missing_deps)
        print('已自动安装缺失的依赖包,请重启程序。')
        print('The missing dependencies have been installed automatically, please restart the program.')
        sys.exit(0)

    # Check configuration files
    from langbot.pkg.core.bootutils import files

    generated_files = await files.generate_files()

    if generated_files:
        print('以下文件不存在,已自动生成:')
        print('Following files do not exist and have been automatically generated:')
        for file in generated_files:
            print('-', file)

    from langbot.pkg.core import boot

    await boot.main(loop)


def main():
    """Main function to be called by console script entry point"""
    # Check Python version
    if sys.version_info < (3, 10, 1):
        print('需要 Python 3.10.1 及以上版本,当前 Python 版本为:', sys.version)
        print('Your Python version is not supported.')
        print('Python 3.10.1 or higher is required. Current version:', sys.version)
        sys.exit(1)

    # Set up the working directory
    # When installed as a package, we need to handle the working directory differently
    # We'll create data directory in current working directory if not exists
    os.makedirs('data', exist_ok=True)

    loop = asyncio.new_event_loop()

    try:
        loop.run_until_complete(main_entry(loop))
    except KeyboardInterrupt:
        print('\n正在退出...')
        print('Exiting...')
    finally:
        loop.close()


if __name__ == '__main__':
    main()
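The main() function above is what the langbot console script declared under [project.scripts] resolves to, so after installation the `langbot` command and a direct import are equivalent entry points. A minimal sketch, assuming the package is installed as shown in pyproject.toml:

# Equivalent to running the installed `langbot` console script; sketch only.
from langbot.__main__ import main

if __name__ == '__main__':
    main()  # parses --standalone-runtime / --debug and boots the application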
177
src/langbot/libs/coze_server_api/client.py
Normal file
@@ -0,0 +1,177 @@
|
||||
import json
|
||||
import asyncio
|
||||
import aiohttp
|
||||
import io
|
||||
from typing import Dict, List, Any, AsyncGenerator
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
class AsyncCozeAPIClient:
|
||||
def __init__(self, api_key: str, api_base: str = 'https://api.coze.cn'):
|
||||
self.api_key = api_key
|
||||
self.api_base = api_base
|
||||
self.session = None
|
||||
|
||||
async def __aenter__(self):
|
||||
"""支持异步上下文管理器"""
|
||||
await self.coze_session()
|
||||
return self
|
||||
|
||||
async def __aexit__(self, exc_type, exc_val, exc_tb):
|
||||
"""退出时自动关闭会话"""
|
||||
await self.close()
|
||||
|
||||
async def coze_session(self):
|
||||
"""确保HTTP session存在"""
|
||||
if self.session is None:
|
||||
connector = aiohttp.TCPConnector(
|
||||
ssl=False if self.api_base.startswith('http://') else True,
|
||||
limit=100,
|
||||
limit_per_host=30,
|
||||
keepalive_timeout=30,
|
||||
enable_cleanup_closed=True,
|
||||
)
|
||||
timeout = aiohttp.ClientTimeout(
|
||||
total=120, # 默认超时时间
|
||||
connect=30,
|
||||
sock_read=120,
|
||||
)
|
||||
headers = {
|
||||
'Authorization': f'Bearer {self.api_key}',
|
||||
'Accept': 'text/event-stream',
|
||||
}
|
||||
self.session = aiohttp.ClientSession(headers=headers, timeout=timeout, connector=connector)
|
||||
return self.session
|
||||
|
||||
async def close(self):
|
||||
"""显式关闭会话"""
|
||||
if self.session and not self.session.closed:
|
||||
await self.session.close()
|
||||
self.session = None
|
||||
|
||||
async def upload(
|
||||
self,
|
||||
file,
|
||||
) -> str:
|
||||
# 处理 Path 对象
|
||||
if isinstance(file, Path):
|
||||
if not file.exists():
|
||||
raise ValueError(f'File not found: {file}')
|
||||
with open(file, 'rb') as f:
|
||||
file = f.read()
|
||||
|
||||
# 处理文件路径字符串
|
||||
elif isinstance(file, str):
|
||||
if not os.path.isfile(file):
|
||||
raise ValueError(f'File not found: {file}')
|
||||
with open(file, 'rb') as f:
|
||||
file = f.read()
|
||||
|
||||
# 处理文件对象
|
||||
elif hasattr(file, 'read'):
|
||||
file = file.read()
|
||||
|
||||
session = await self.coze_session()
|
||||
url = f'{self.api_base}/v1/files/upload'
|
||||
|
||||
try:
|
||||
file_io = io.BytesIO(file)
|
||||
async with session.post(
|
||||
url,
|
||||
data={
|
||||
'file': file_io,
|
||||
},
|
||||
timeout=aiohttp.ClientTimeout(total=60),
|
||||
) as response:
|
||||
if response.status == 401:
|
||||
raise Exception('Coze API 认证失败,请检查 API Key 是否正确')
|
||||
|
||||
response_text = await response.text()
|
||||
|
||||
if response.status != 200:
|
||||
raise Exception(f'文件上传失败,状态码: {response.status}, 响应: {response_text}')
|
||||
try:
|
||||
result = await response.json()
|
||||
except json.JSONDecodeError:
|
||||
raise Exception(f'文件上传响应解析失败: {response_text}')
|
||||
|
||||
if result.get('code') != 0:
|
||||
raise Exception(f'文件上传失败: {result.get("msg", "未知错误")}')
|
||||
|
||||
file_id = result['data']['id']
|
||||
return file_id
|
||||
|
||||
except asyncio.TimeoutError:
|
||||
raise Exception('文件上传超时')
|
||||
except Exception as e:
|
||||
raise Exception(f'文件上传失败: {str(e)}')
|
||||
|
||||
async def chat_messages(
|
||||
self,
|
||||
bot_id: str,
|
||||
user_id: str,
|
||||
additional_messages: List[Dict] | None = None,
|
||||
conversation_id: str | None = None,
|
||||
auto_save_history: bool = True,
|
||||
stream: bool = True,
|
||||
timeout: float = 120,
|
||||
) -> AsyncGenerator[Dict[str, Any], None]:
|
||||
"""发送聊天消息并返回流式响应
|
||||
|
||||
Args:
|
||||
bot_id: Bot ID
|
||||
user_id: 用户ID
|
||||
additional_messages: 额外消息列表
|
||||
conversation_id: 会话ID
|
||||
auto_save_history: 是否自动保存历史
|
||||
stream: 是否流式响应
|
||||
timeout: 超时时间
|
||||
"""
|
||||
session = await self.coze_session()
|
||||
url = f'{self.api_base}/v3/chat'
|
||||
|
||||
payload = {
|
||||
'bot_id': bot_id,
|
||||
'user_id': user_id,
|
||||
'stream': stream,
|
||||
'auto_save_history': auto_save_history,
|
||||
}
|
||||
|
||||
if additional_messages:
|
||||
payload['additional_messages'] = additional_messages
|
||||
|
||||
params = {}
|
||||
if conversation_id:
|
||||
params['conversation_id'] = conversation_id
|
||||
|
||||
try:
|
||||
async with session.post(
|
||||
url,
|
||||
json=payload,
|
||||
params=params,
|
||||
timeout=aiohttp.ClientTimeout(total=timeout),
|
||||
) as response:
|
||||
if response.status == 401:
|
||||
raise Exception('Coze API 认证失败,请检查 API Key 是否正确')
|
||||
|
||||
if response.status != 200:
|
||||
raise Exception(f'Coze API 流式请求失败,状态码: {response.status}')
|
||||
|
||||
async for chunk in response.content:
|
||||
chunk = chunk.decode('utf-8')
|
||||
if chunk != '\n':
|
||||
if chunk.startswith('event:'):
|
||||
chunk_type = chunk.replace('event:', '', 1).strip()
|
||||
elif chunk.startswith('data:'):
|
||||
chunk_data = chunk.replace('data:', '', 1).strip()
|
||||
else:
|
||||
yield {
|
||||
'event': chunk_type,
|
||||
'data': json.loads(chunk_data) if chunk_data else {},
|
||||
} # 处理本地部署时,接口返回的data为空值
|
||||
|
||||
except asyncio.TimeoutError:
|
||||
raise Exception(f'Coze API 流式请求超时 ({timeout}秒)')
|
||||
except Exception as e:
|
||||
raise Exception(f'Coze API 流式请求失败: {str(e)}')
|
||||
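Based on the AsyncCozeAPIClient shown above, a caller would typically drive it as an async context manager and iterate the server-sent events yielded by chat_messages. A usage sketch follows; the bot/user ids and the message dict are placeholders, and the import path assumes the new file location src/langbot/libs/coze_server_api/client.py.

# Usage sketch for AsyncCozeAPIClient; ids and message content are placeholders.
import asyncio

from langbot.libs.coze_server_api.client import AsyncCozeAPIClient


async def demo():
    async with AsyncCozeAPIClient(api_key='your-api-key') as client:
        async for event in client.chat_messages(
            bot_id='bot-123',
            user_id='user-456',
            additional_messages=[{'role': 'user', 'content': 'hello', 'content_type': 'text'}],
        ):
            # each item carries the SSE event name and its decoded JSON payload
            print(event['event'], event['data'])


asyncio.run(demo())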
@@ -5,6 +5,8 @@ import typing
|
||||
import json
|
||||
|
||||
from .errors import DifyAPIError
|
||||
from pathlib import Path
|
||||
import os
|
||||
|
||||
|
||||
class AsyncDifyServiceClient:
|
||||
@@ -109,7 +111,23 @@ class AsyncDifyServiceClient:
|
||||
user: str,
|
||||
timeout: float = 30.0,
|
||||
) -> str:
|
||||
"""上传文件"""
|
||||
# 处理 Path 对象
|
||||
if isinstance(file, Path):
|
||||
if not file.exists():
|
||||
raise ValueError(f'File not found: {file}')
|
||||
with open(file, 'rb') as f:
|
||||
file = f.read()
|
||||
|
||||
# 处理文件路径字符串
|
||||
elif isinstance(file, str):
|
||||
if not os.path.isfile(file):
|
||||
raise ValueError(f'File not found: {file}')
|
||||
with open(file, 'rb') as f:
|
||||
file = f.read()
|
||||
|
||||
# 处理文件对象
|
||||
elif hasattr(file, 'read'):
|
||||
file = file.read()
|
||||
async with httpx.AsyncClient(
|
||||
base_url=self.base_url,
|
||||
trust_env=True,
|
||||
@@ -121,6 +139,8 @@ class AsyncDifyServiceClient:
|
||||
headers={'Authorization': f'Bearer {self.api_key}'},
|
||||
files={
|
||||
'file': file,
|
||||
},
|
||||
data={
|
||||
'user': (None, user),
|
||||
},
|
||||
)
|
||||
@@ -110,6 +110,24 @@ class DingTalkClient:
|
||||
else:
|
||||
raise Exception(f'Error: {response.status_code}, {response.text}')
|
||||
|
||||
async def get_file_url(self, download_code: str):
|
||||
if not await self.check_access_token():
|
||||
await self.get_access_token()
|
||||
url = 'https://api.dingtalk.com/v1.0/robot/messageFiles/download'
|
||||
params = {'downloadCode': download_code, 'robotCode': self.robot_code}
|
||||
headers = {'x-acs-dingtalk-access-token': self.access_token}
|
||||
async with httpx.AsyncClient() as client:
|
||||
response = await client.post(url, headers=headers, json=params)
|
||||
if response.status_code == 200:
|
||||
result = response.json()
|
||||
download_url = result.get('downloadUrl')
|
||||
if download_url:
|
||||
return download_url
|
||||
else:
|
||||
await self.logger.error(f'failed to get file: {response.json()}')
|
||||
else:
|
||||
raise Exception(f'Error: {response.status_code}, {response.text}')
|
||||
|
||||
async def update_incoming_message(self, message):
|
||||
"""异步更新 DingTalkClient 中的 incoming_message"""
|
||||
message_data = await self.get_message(message)
|
||||
@@ -170,12 +188,69 @@ class DingTalkClient:
|
||||
|
||||
if incoming_message.message_type == 'richText':
|
||||
data = incoming_message.rich_text_content.to_dict()
|
||||
|
||||
# 使用统一的结构化数据格式,保持顺序
|
||||
rich_content = {
|
||||
'Type': 'richText',
|
||||
'Elements': [], # 按顺序存储所有元素
|
||||
'SimpleContent': '', # 兼容字段:纯文本内容
|
||||
'SimplePicture': '', # 兼容字段:第一张图片
|
||||
}
|
||||
|
||||
# 先收集所有文本和图片占位符
|
||||
text_elements = []
|
||||
|
||||
# 解析富文本内容,保持原始顺序
|
||||
for item in data['richText']:
|
||||
if 'text' in item:
|
||||
message_data['Content'] = item['text']
|
||||
if incoming_message.get_image_list()[0]:
|
||||
message_data['Picture'] = await self.download_image(incoming_message.get_image_list()[0])
|
||||
message_data['Type'] = 'text'
|
||||
# 处理文本内容
|
||||
if 'text' in item and item['text'] != '\n':
|
||||
element = {'Type': 'text', 'Content': item['text']}
|
||||
rich_content['Elements'].append(element)
|
||||
text_elements.append(item['text'])
|
||||
|
||||
# 检查是否是图片元素 - 根据钉钉API的实际结构调整
|
||||
# 钉钉富文本中的图片通常有特定标识,可能需要根据实际返回调整
|
||||
elif item.get('type') == 'picture':
|
||||
# 创建图片占位符
|
||||
element = {
|
||||
'Type': 'image_placeholder',
|
||||
}
|
||||
rich_content['Elements'].append(element)
|
||||
|
||||
# 获取并下载所有图片
|
||||
image_list = incoming_message.get_image_list()
|
||||
if image_list:
|
||||
new_elements = []
|
||||
image_index = 0
|
||||
|
||||
for element in rich_content['Elements']:
|
||||
if element['Type'] == 'image_placeholder':
|
||||
if image_index < len(image_list) and image_list[image_index]:
|
||||
image_url = await self.download_image(image_list[image_index])
|
||||
new_elements.append({'Type': 'image', 'Picture': image_url})
|
||||
image_index += 1
|
||||
else:
|
||||
# 如果没有对应的图片,保留占位符或跳过
|
||||
continue
|
||||
else:
|
||||
new_elements.append(element)
|
||||
|
||||
rich_content['Elements'] = new_elements
|
||||
|
||||
# 设置兼容字段
|
||||
all_texts = [elem['Content'] for elem in rich_content['Elements'] if elem.get('Type') == 'text']
|
||||
rich_content['SimpleContent'] = '\n'.join(all_texts) if all_texts else ''
|
||||
|
||||
all_images = [elem['Picture'] for elem in rich_content['Elements'] if elem.get('Type') == 'image']
|
||||
if all_images:
|
||||
rich_content['SimplePicture'] = all_images[0]
|
||||
rich_content['AllImages'] = all_images # 所有图片的列表
|
||||
|
||||
# 设置原始的 content 和 picture 字段以保持兼容
|
||||
message_data['Content'] = rich_content['SimpleContent']
|
||||
message_data['Rich_Content'] = rich_content
|
||||
if all_images:
|
||||
message_data['Picture'] = all_images[0]
|
||||
|
||||
elif incoming_message.message_type == 'text':
|
||||
message_data['Content'] = incoming_message.get_text_list()[0]
|
||||
@@ -189,6 +264,17 @@ class DingTalkClient:
|
||||
message_data['Audio'] = await self.get_audio_url(incoming_message.to_dict()['content']['downloadCode'])
|
||||
|
||||
message_data['Type'] = 'audio'
|
||||
elif incoming_message.message_type == 'file':
|
||||
down_list = incoming_message.get_down_list()
|
||||
if len(down_list) >= 2:
|
||||
message_data['File'] = await self.get_file_url(down_list[0])
|
||||
message_data['Name'] = down_list[1]
|
||||
else:
|
||||
if self.logger:
|
||||
await self.logger.error(f'get_down_list() returned fewer than 2 elements: {down_list}')
|
||||
message_data['File'] = None
|
||||
message_data['Name'] = None
|
||||
message_data['Type'] = 'file'
|
||||
|
||||
copy_message_data = message_data.copy()
|
||||
del copy_message_data['IncomingMessage']
|
||||
@@ -15,6 +15,10 @@ class DingTalkEvent(dict):
|
||||
def content(self):
|
||||
return self.get('Content', '')
|
||||
|
||||
@property
|
||||
def rich_content(self):
|
||||
return self.get('Rich_Content', '')
|
||||
|
||||
@property
|
||||
def incoming_message(self) -> Optional['dingtalk_stream.chatbot.ChatbotMessage']:
|
||||
return self.get('IncomingMessage')
|
||||
@@ -31,6 +35,14 @@ class DingTalkEvent(dict):
|
||||
def audio(self):
|
||||
return self.get('Audio', '')
|
||||
|
||||
@property
|
||||
def file(self):
|
||||
return self.get('File', '')
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
return self.get('Name', '')
|
||||
|
||||
@property
|
||||
def conversation(self):
|
||||
return self.get('conversation_type', '')
|
||||
@@ -1,12 +1,12 @@
|
||||
# 微信公众号的加解密算法与企业微信一样,所以直接使用企业微信的加解密算法文件
|
||||
import time
|
||||
import traceback
|
||||
from libs.wecom_api.WXBizMsgCrypt3 import WXBizMsgCrypt
|
||||
from langbot.libs.wecom_api.WXBizMsgCrypt3 import WXBizMsgCrypt
|
||||
import xml.etree.ElementTree as ET
|
||||
from quart import Quart, request
|
||||
import hashlib
|
||||
from typing import Callable
|
||||
from .oaevent import OAEvent
|
||||
from langbot.libs.official_account_api.oaevent import OAEvent
|
||||
|
||||
import asyncio
|
||||
|
||||
@@ -23,20 +23,25 @@ xml_template = """
|
||||
|
||||
|
||||
class OAClient:
|
||||
def __init__(self, token: str, EncodingAESKey: str, AppID: str, Appsecret: str, logger: None):
|
||||
def __init__(self, token: str, EncodingAESKey: str, AppID: str, Appsecret: str, logger: None, unified_mode: bool = False):
|
||||
self.token = token
|
||||
self.aes = EncodingAESKey
|
||||
self.appid = AppID
|
||||
self.appsecret = Appsecret
|
||||
self.base_url = 'https://api.weixin.qq.com'
|
||||
self.access_token = ''
|
||||
self.unified_mode = unified_mode
|
||||
self.app = Quart(__name__)
|
||||
self.app.add_url_rule(
|
||||
'/callback/command',
|
||||
'handle_callback',
|
||||
self.handle_callback_request,
|
||||
methods=['GET', 'POST'],
|
||||
)
|
||||
|
||||
# 只有在非统一模式下才注册独立路由
|
||||
if not self.unified_mode:
|
||||
self.app.add_url_rule(
|
||||
'/callback/command',
|
||||
'handle_callback',
|
||||
self.handle_callback_request,
|
||||
methods=['GET', 'POST'],
|
||||
)
|
||||
|
||||
self._message_handlers = {
|
||||
'example': [],
|
||||
}
|
||||
@@ -46,19 +51,39 @@ class OAClient:
|
||||
self.logger = logger
|
||||
|
||||
async def handle_callback_request(self):
|
||||
"""处理回调请求(独立端口模式,使用全局 request)。"""
|
||||
return await self._handle_callback_internal(request)
|
||||
|
||||
async def handle_unified_webhook(self, req):
|
||||
"""处理回调请求(统一 webhook 模式,显式传递 request)。
|
||||
|
||||
Args:
|
||||
req: Quart Request 对象
|
||||
|
||||
Returns:
|
||||
响应数据
|
||||
"""
|
||||
return await self._handle_callback_internal(req)
|
||||
|
||||
async def _handle_callback_internal(self, req):
|
||||
"""处理回调请求的内部实现,包括 GET 验证和 POST 消息接收。
|
||||
|
||||
Args:
|
||||
req: Quart Request 对象
|
||||
"""
|
||||
try:
|
||||
# 每隔100毫秒查询是否生成ai回答
|
||||
start_time = time.time()
|
||||
signature = request.args.get('signature', '')
|
||||
timestamp = request.args.get('timestamp', '')
|
||||
nonce = request.args.get('nonce', '')
|
||||
echostr = request.args.get('echostr', '')
|
||||
msg_signature = request.args.get('msg_signature', '')
|
||||
signature = req.args.get('signature', '')
|
||||
timestamp = req.args.get('timestamp', '')
|
||||
nonce = req.args.get('nonce', '')
|
||||
echostr = req.args.get('echostr', '')
|
||||
msg_signature = req.args.get('msg_signature', '')
|
||||
if msg_signature is None:
|
||||
await self.logger.error('msg_signature不在请求体中')
|
||||
raise Exception('msg_signature不在请求体中')
|
||||
|
||||
if request.method == 'GET':
|
||||
if req.method == 'GET':
|
||||
# 校验签名
|
||||
check_str = ''.join(sorted([self.token, timestamp, nonce]))
|
||||
check_signature = hashlib.sha1(check_str.encode('utf-8')).hexdigest()
|
||||
@@ -68,8 +93,8 @@ class OAClient:
|
||||
else:
|
||||
await self.logger.error('拒绝请求')
|
||||
raise Exception('拒绝请求')
|
||||
elif request.method == 'POST':
|
||||
encryt_msg = await request.data
|
||||
elif req.method == 'POST':
|
||||
encryt_msg = await req.data
|
||||
wxcpt = WXBizMsgCrypt(self.token, self.aes, self.appid)
|
||||
ret, xml_msg = wxcpt.DecryptMsg(encryt_msg, msg_signature, timestamp, nonce)
|
||||
xml_msg = xml_msg.decode('utf-8')
|
||||
@@ -182,6 +207,7 @@ class OAClientForLongerResponse:
|
||||
Appsecret: str,
|
||||
LoadingMessage: str,
|
||||
logger: None,
|
||||
unified_mode: bool = False,
|
||||
):
|
||||
self.token = token
|
||||
self.aes = EncodingAESKey
|
||||
@@ -189,13 +215,18 @@ class OAClientForLongerResponse:
|
||||
self.appsecret = Appsecret
|
||||
self.base_url = 'https://api.weixin.qq.com'
|
||||
self.access_token = ''
|
||||
self.unified_mode = unified_mode
|
||||
self.app = Quart(__name__)
|
||||
self.app.add_url_rule(
|
||||
'/callback/command',
|
||||
'handle_callback',
|
||||
self.handle_callback_request,
|
||||
methods=['GET', 'POST'],
|
||||
)
|
||||
|
||||
# 只有在非统一模式下才注册独立路由
|
||||
if not self.unified_mode:
|
||||
self.app.add_url_rule(
|
||||
'/callback/command',
|
||||
'handle_callback',
|
||||
self.handle_callback_request,
|
||||
methods=['GET', 'POST'],
|
||||
)
|
||||
|
||||
self._message_handlers = {
|
||||
'example': [],
|
||||
}
|
||||
@@ -206,24 +237,44 @@ class OAClientForLongerResponse:
|
||||
self.logger = logger
|
||||
|
||||
async def handle_callback_request(self):
|
||||
"""处理回调请求(独立端口模式,使用全局 request)。"""
|
||||
return await self._handle_callback_internal(request)
|
||||
|
||||
async def handle_unified_webhook(self, req):
|
||||
"""处理回调请求(统一 webhook 模式,显式传递 request)。
|
||||
|
||||
Args:
|
||||
req: Quart Request 对象
|
||||
|
||||
Returns:
|
||||
响应数据
|
||||
"""
|
||||
return await self._handle_callback_internal(req)
|
||||
|
||||
async def _handle_callback_internal(self, req):
|
||||
"""处理回调请求的内部实现,包括 GET 验证和 POST 消息接收。
|
||||
|
||||
Args:
|
||||
req: Quart Request 对象
|
||||
"""
|
||||
try:
|
||||
signature = request.args.get('signature', '')
|
||||
timestamp = request.args.get('timestamp', '')
|
||||
nonce = request.args.get('nonce', '')
|
||||
echostr = request.args.get('echostr', '')
|
||||
msg_signature = request.args.get('msg_signature', '')
|
||||
signature = req.args.get('signature', '')
|
||||
timestamp = req.args.get('timestamp', '')
|
||||
nonce = req.args.get('nonce', '')
|
||||
echostr = req.args.get('echostr', '')
|
||||
msg_signature = req.args.get('msg_signature', '')
|
||||
|
||||
if msg_signature is None:
|
||||
await self.logger.error('msg_signature不在请求体中')
|
||||
raise Exception('msg_signature不在请求体中')
|
||||
|
||||
if request.method == 'GET':
|
||||
if req.method == 'GET':
|
||||
check_str = ''.join(sorted([self.token, timestamp, nonce]))
|
||||
check_signature = hashlib.sha1(check_str.encode('utf-8')).hexdigest()
|
||||
return echostr if check_signature == signature else '拒绝请求'
|
||||
|
||||
elif request.method == 'POST':
|
||||
encryt_msg = await request.data
|
||||
elif req.method == 'POST':
|
||||
encryt_msg = await req.data
|
||||
wxcpt = WXBizMsgCrypt(self.token, self.aes, self.appid)
|
||||
ret, xml_msg = wxcpt.DecryptMsg(encryt_msg, msg_signature, timestamp, nonce)
|
||||
xml_msg = xml_msg.decode('utf-8')
|
||||
@@ -10,38 +10,20 @@ import traceback
|
||||
from cryptography.hazmat.primitives.asymmetric import ed25519
|
||||
|
||||
|
||||
def handle_validation(body: dict, bot_secret: str):
|
||||
# bot正确的secert是32位的,此处仅为了适配演示demo
|
||||
while len(bot_secret) < 32:
|
||||
bot_secret = bot_secret * 2
|
||||
bot_secret = bot_secret[:32]
|
||||
# 实际使用场景中以上三行内容可清除
|
||||
|
||||
seed_bytes = bot_secret.encode()
|
||||
|
||||
signing_key = ed25519.Ed25519PrivateKey.from_private_bytes(seed_bytes)
|
||||
|
||||
msg = body['d']['event_ts'] + body['d']['plain_token']
|
||||
msg_bytes = msg.encode()
|
||||
|
||||
signature = signing_key.sign(msg_bytes)
|
||||
|
||||
signature_hex = signature.hex()
|
||||
|
||||
response = {'plain_token': body['d']['plain_token'], 'signature': signature_hex}
|
||||
|
||||
return response
|
||||
|
||||
|
||||
class QQOfficialClient:
|
||||
def __init__(self, secret: str, token: str, app_id: str, logger: None):
|
||||
def __init__(self, secret: str, token: str, app_id: str, logger: None, unified_mode: bool = False):
|
||||
self.unified_mode = unified_mode
|
||||
self.app = Quart(__name__)
|
||||
self.app.add_url_rule(
|
||||
'/callback/command',
|
||||
'handle_callback',
|
||||
self.handle_callback_request,
|
||||
methods=['GET', 'POST'],
|
||||
)
|
||||
|
||||
# 只有在非统一模式下才注册独立路由
|
||||
if not self.unified_mode:
|
||||
self.app.add_url_rule(
|
||||
'/callback/command',
|
||||
'handle_callback',
|
||||
self.handle_callback_request,
|
||||
methods=['GET', 'POST'],
|
||||
)
|
||||
|
||||
self.secret = secret
|
||||
self.token = token
|
||||
self.app_id = app_id
|
||||
@@ -82,18 +64,45 @@ class QQOfficialClient:
|
||||
raise Exception(f'获取access_token失败: {e}')
|
||||
|
||||
async def handle_callback_request(self):
|
||||
"""处理回调请求"""
|
||||
"""处理回调请求(独立端口模式,使用全局 request)"""
|
||||
return await self._handle_callback_internal(request)
|
||||
|
||||
async def handle_unified_webhook(self, req):
|
||||
"""处理回调请求(统一 webhook 模式,显式传递 request)。
|
||||
|
||||
Args:
|
||||
req: Quart Request 对象
|
||||
|
||||
Returns:
|
||||
响应数据
|
||||
"""
|
||||
return await self._handle_callback_internal(req)
|
||||
|
||||
async def _handle_callback_internal(self, req):
|
||||
"""处理回调请求的内部实现。
|
||||
|
||||
Args:
|
||||
req: Quart Request 对象
|
||||
"""
|
||||
try:
|
||||
# 读取请求数据
|
||||
body = await request.get_data()
|
||||
|
||||
body = await req.get_data()
|
||||
|
||||
print(f'[QQ Official] Received request, body length: {len(body)}')
|
||||
|
||||
if not body or len(body) == 0:
|
||||
print('[QQ Official] Received empty body, might be health check or GET request')
|
||||
return {'code': 0, 'message': 'ok'}, 200
|
||||
|
||||
payload = json.loads(body)
|
||||
|
||||
# 验证是否为回调验证请求
|
||||
|
||||
if payload.get('op') == 13:
|
||||
# 生成签名
|
||||
response = handle_validation(payload, self.secret)
|
||||
|
||||
return response
|
||||
validation_data = payload.get('d')
|
||||
if not validation_data:
|
||||
return {'error': "missing 'd' field"}, 400
|
||||
response = await self.verify(validation_data)
|
||||
return response, 200
|
||||
|
||||
if payload.get('op') == 0:
|
||||
message_data = await self.get_message(payload)
|
||||
@@ -104,6 +113,7 @@ class QQOfficialClient:
|
||||
return {'code': 0, 'message': 'success'}
|
||||
|
||||
except Exception as e:
|
||||
print(f'[QQ Official] ERROR: {traceback.format_exc()}')
|
||||
await self.logger.error(f'Error in handle_callback_request: {traceback.format_exc()}')
|
||||
return {'error': str(e)}, 400
|
||||
|
||||
@@ -261,3 +271,26 @@ class QQOfficialClient:
|
||||
if self.access_token_expiry_time is None:
|
||||
return True
|
||||
return time.time() > self.access_token_expiry_time
|
||||
|
||||
async def repeat_seed(self, bot_secret: str, target_size: int = 32) -> bytes:
|
||||
seed = bot_secret
|
||||
while len(seed) < target_size:
|
||||
seed *= 2
|
||||
return seed[:target_size].encode("utf-8")
|
||||
|
||||
async def verify(self, validation_payload: dict):
|
||||
seed = await self.repeat_seed(self.secret)
|
||||
private_key = ed25519.Ed25519PrivateKey.from_private_bytes(seed)
|
||||
|
||||
event_ts = validation_payload.get("event_ts", "")
|
||||
plain_token = validation_payload.get("plain_token", "")
|
||||
msg = event_ts + plain_token
|
||||
|
||||
# sign
|
||||
signature = private_key.sign(msg.encode()).hex()
|
||||
|
||||
response = {
|
||||
"plain_token": plain_token,
|
||||
"signature": signature,
|
||||
}
|
||||
return response
|
||||
@@ -8,14 +8,19 @@ import langbot_plugin.api.entities.builtin.platform.events as platform_events
|
||||
|
||||
|
||||
class SlackClient:
|
||||
def __init__(self, bot_token: str, signing_secret: str, logger: None):
|
||||
def __init__(self, bot_token: str, signing_secret: str, logger: None, unified_mode: bool = False):
|
||||
self.bot_token = bot_token
|
||||
self.signing_secret = signing_secret
|
||||
self.unified_mode = unified_mode
|
||||
self.app = Quart(__name__)
|
||||
self.client = AsyncWebClient(self.bot_token)
|
||||
self.app.add_url_rule(
|
||||
'/callback/command', 'handle_callback', self.handle_callback_request, methods=['GET', 'POST']
|
||||
)
|
||||
|
||||
# 只有在非统一模式下才注册独立路由
|
||||
if not self.unified_mode:
|
||||
self.app.add_url_rule(
|
||||
'/callback/command', 'handle_callback', self.handle_callback_request, methods=['GET', 'POST']
|
||||
)
|
||||
|
||||
self._message_handlers = {
|
||||
'example': [],
|
||||
}
|
||||
@@ -23,8 +28,28 @@ class SlackClient:
|
||||
self.logger = logger
|
||||
|
||||
async def handle_callback_request(self):
|
||||
"""处理回调请求(独立端口模式,使用全局 request)"""
|
||||
return await self._handle_callback_internal(request)
|
||||
|
||||
async def handle_unified_webhook(self, req):
|
||||
"""处理回调请求(统一 webhook 模式,显式传递 request)。
|
||||
|
||||
Args:
|
||||
req: Quart Request 对象
|
||||
|
||||
Returns:
|
||||
响应数据
|
||||
"""
|
||||
return await self._handle_callback_internal(req)
|
||||
|
||||
async def _handle_callback_internal(self, req):
|
||||
"""处理回调请求的内部实现。
|
||||
|
||||
Args:
|
||||
req: Quart Request 对象
|
||||
"""
|
||||
try:
|
||||
body = await request.get_data()
|
||||
body = await req.get_data()
|
||||
data = json.loads(body)
|
||||
if 'type' in data:
|
||||
if data['type'] == 'url_verification':
|
||||
@@ -1,4 +1,4 @@
from libs.wechatpad_api.util.http_util import post_json
from langbot.libs.wechatpad_api.util.http_util import post_json


class ChatRoomApi:
@@ -1,4 +1,4 @@
from libs.wechatpad_api.util.http_util import post_json
from langbot.libs.wechatpad_api.util.http_util import post_json
import httpx
import base64

@@ -1,4 +1,4 @@
from libs.wechatpad_api.util.http_util import post_json, get_json
from langbot.libs.wechatpad_api.util.http_util import post_json, get_json


class LoginApi:
@@ -1,4 +1,4 @@
from libs.wechatpad_api.util.http_util import post_json
from langbot.libs.wechatpad_api.util.http_util import post_json


class MessageApi:
@@ -1,4 +1,4 @@
from libs.wechatpad_api.util.http_util import post_json, async_request, get_json
from langbot.libs.wechatpad_api.util.http_util import post_json, async_request, get_json


class UserApi:
@@ -1,9 +1,9 @@
from libs.wechatpad_api.api.login import LoginApi
from libs.wechatpad_api.api.friend import FriendApi
from libs.wechatpad_api.api.message import MessageApi
from libs.wechatpad_api.api.user import UserApi
from libs.wechatpad_api.api.downloadpai import DownloadApi
from libs.wechatpad_api.api.chatroom import ChatRoomApi
from langbot.libs.wechatpad_api.api.login import LoginApi
from langbot.libs.wechatpad_api.api.friend import FriendApi
from langbot.libs.wechatpad_api.api.message import MessageApi
from langbot.libs.wechatpad_api.api.user import UserApi
from langbot.libs.wechatpad_api.api.downloadpai import DownloadApi
from langbot.libs.wechatpad_api.api.chatroom import ChatRoomApi


class WeChatPadClient:
@@ -16,7 +16,7 @@ import struct
from Crypto.Cipher import AES
import xml.etree.cElementTree as ET
import socket
from libs.wecom_ai_bot_api import ierror
from langbot.libs.wecom_ai_bot_api import ierror


"""
613
src/langbot/libs/wecom_ai_bot_api/api.py
Normal file
@@ -0,0 +1,613 @@
|
||||
import asyncio
|
||||
import base64
|
||||
import json
|
||||
import time
|
||||
import traceback
|
||||
import uuid
|
||||
import xml.etree.ElementTree as ET
|
||||
from dataclasses import dataclass, field
|
||||
from typing import Any, Callable, Optional
|
||||
from urllib.parse import unquote
|
||||
|
||||
import httpx
|
||||
from Crypto.Cipher import AES
|
||||
from quart import Quart, request, Response, jsonify
|
||||
|
||||
from libs.wecom_ai_bot_api import wecombotevent
|
||||
from libs.wecom_ai_bot_api.WXBizMsgCrypt3 import WXBizMsgCrypt
|
||||
from pkg.platform.logger import EventLogger
|
||||
|
||||
|
||||
@dataclass
|
||||
class StreamChunk:
|
||||
"""描述单次推送给企业微信的流式片段。"""
|
||||
|
||||
# 需要返回给企业微信的文本内容
|
||||
content: str
|
||||
|
||||
# 标记是否为最终片段,对应企业微信协议里的 finish 字段
|
||||
is_final: bool = False
|
||||
|
||||
# 预留额外元信息,未来支持多模态扩展时可使用
|
||||
meta: dict[str, Any] = field(default_factory=dict)
|
||||
|
||||
|
||||
@dataclass
|
||||
class StreamSession:
|
||||
"""维护一次企业微信流式会话的上下文。"""
|
||||
|
||||
# 企业微信要求的 stream_id,用于标识后续刷新请求
|
||||
stream_id: str
|
||||
|
||||
# 原始消息的 msgid,便于与流水线消息对应
|
||||
msg_id: str
|
||||
|
||||
# 群聊会话标识(单聊时为空)
|
||||
chat_id: Optional[str]
|
||||
|
||||
# 触发消息的发送者
|
||||
user_id: Optional[str]
|
||||
|
||||
# 会话创建时间
|
||||
created_at: float = field(default_factory=time.time)
|
||||
|
||||
# 最近一次被访问的时间,cleanup 依据该值判断过期
|
||||
last_access: float = field(default_factory=time.time)
|
||||
|
||||
# 将流水线增量结果缓存到队列,刷新请求逐条消费
|
||||
queue: asyncio.Queue = field(default_factory=asyncio.Queue)
|
||||
|
||||
# 是否已经完成(收到最终片段)
|
||||
finished: bool = False
|
||||
|
||||
# 缓存最近一次片段,处理重试或超时兜底
|
||||
last_chunk: Optional[StreamChunk] = None
|
||||
|
||||
|
||||
class StreamSessionManager:
    """Manages the lifecycle of stream sessions and the producing/consuming of their queues."""

    def __init__(self, logger: EventLogger, ttl: int = 60) -> None:
        self.logger = logger

        self.ttl = ttl  # Idle timeout in seconds; sessions not accessed within this window are removed by cleanup
        self._sessions: dict[str, StreamSession] = {}  # stream_id -> StreamSession mapping
        self._msg_index: dict[str, str] = {}  # msgid -> stream_id mapping, so the pipeline can find a session by message ID

    def get_stream_id_by_msg(self, msg_id: str) -> Optional[str]:
        if not msg_id:
            return None
        return self._msg_index.get(msg_id)

    def get_session(self, stream_id: str) -> Optional[StreamSession]:
        return self._sessions.get(stream_id)

    def create_or_get(self, msg_json: dict[str, Any]) -> tuple[StreamSession, bool]:
        """Create or fetch a session based on a WeCom callback.

        Args:
            msg_json: The decrypted WeCom callback JSON.

        Returns:
            Tuple[StreamSession, bool]: `StreamSession` is the session instance; `bool` indicates whether the session was newly created.

        Example:
            Call this on the first callback; trigger the pipeline only after getting `is_new=True`.
        """
        msg_id = msg_json.get('msgid', '')
        if msg_id and msg_id in self._msg_index:
            stream_id = self._msg_index[msg_id]
            session = self._sessions.get(stream_id)
            if session:
                session.last_access = time.time()
                return session, False

        stream_id = str(uuid.uuid4())
        session = StreamSession(
            stream_id=stream_id,
            msg_id=msg_id,
            chat_id=msg_json.get('chatid'),
            user_id=msg_json.get('from', {}).get('userid'),
        )

        if msg_id:
            self._msg_index[msg_id] = stream_id
        self._sessions[stream_id] = session
        return session, True

    async def publish(self, stream_id: str, chunk: StreamChunk) -> bool:
        """Write a new incremental fragment into the stream queue.

        Args:
            stream_id: The streaming session ID assigned by WeCom.
            chunk: The incremental fragment to send.

        Returns:
            bool: True when the stream queue exists and the fragment was enqueued successfully.

        Example:
            After receiving a model increment, call `await manager.publish('sid', StreamChunk('hello'))`.
        """
        session = self._sessions.get(stream_id)
        if not session:
            return False

        session.last_access = time.time()
        session.last_chunk = chunk

        try:
            session.queue.put_nowait(chunk)
        except asyncio.QueueFull:
            # The queue is unbounded by default; this branch is purely defensive
            await session.queue.put(chunk)

        if chunk.is_final:
            session.finished = True

        return True

    async def consume(self, stream_id: str, timeout: float = 0.5) -> Optional[StreamChunk]:
        """Take one fragment from the queue, returning None on timeout.

        Args:
            stream_id: The WeCom streaming session ID.
            timeout: Maximum time to wait for a fragment, in seconds.

        Returns:
            Optional[StreamChunk]: The fragment on success; None when the wait times out or the session does not exist.

        Example:
            Called when a WeCom refresh arrives; returns a `StreamChunk` immediately if the queue has data.
        """
        session = self._sessions.get(stream_id)
        if not session:
            return None

        session.last_access = time.time()

        try:
            chunk = await asyncio.wait_for(session.queue.get(), timeout)
            session.last_access = time.time()
            if chunk.is_final:
                session.finished = True
            return chunk
        except asyncio.TimeoutError:
            if session.finished and session.last_chunk:
                return session.last_chunk
            return None

    def mark_finished(self, stream_id: str) -> None:
        session = self._sessions.get(stream_id)
        if session:
            session.finished = True
            session.last_access = time.time()

    def cleanup(self) -> None:
        """Periodically remove expired sessions to keep the queues and mappings from growing without bound."""
        now = time.time()
        expired: list[str] = []
        for stream_id, session in self._sessions.items():
            if now - session.last_access > self.ttl:
                expired.append(stream_id)

        for stream_id in expired:
            session = self._sessions.pop(stream_id, None)
            if not session:
                continue
            msg_id = session.msg_id
            if msg_id and self._msg_index.get(msg_id) == stream_id:
                self._msg_index.pop(msg_id, None)

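# Editor's sketch (not part of the original diff): how StreamSessionManager is intended to be
# driven end to end. The pipeline side publishes increments while WeCom refresh callbacks consume
# them one at a time; the function name and the sample payload values below are hypothetical.
async def _demo_stream_roundtrip(manager: StreamSessionManager) -> None:
    session, is_new = manager.create_or_get({'msgid': 'm-1', 'from': {'userid': 'alice'}})
    if is_new:
        # Producer side: the pipeline pushes increments as the model emits them.
        await manager.publish(session.stream_id, StreamChunk(content='Hel'))
        await manager.publish(session.stream_id, StreamChunk(content='lo', is_final=True))
    # Consumer side: each WeCom refresh request drains at most one chunk from the queue.
    first = await manager.consume(session.stream_id, timeout=0.5)
    assert first is not None and first.content == 'Hel'
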
class WecomBotClient:
    def __init__(self, Token: str, EnCodingAESKey: str, Corpid: str, logger: EventLogger, unified_mode: bool = False):
        """WeCom (WeChat Work) intelligent bot client.

        Args:
            Token: Token used to verify WeCom callbacks.
            EnCodingAESKey: Key used to encrypt and decrypt WeCom messages.
            Corpid: Corporation ID.
            logger: Event logger.
            unified_mode: Whether to use the unified webhook mode (default False).

        Example:
            >>> client = WecomBotClient(Token='token', EnCodingAESKey='aeskey', Corpid='corp', logger=logger)
        """

        self.Token = Token
        self.EnCodingAESKey = EnCodingAESKey
        self.Corpid = Corpid
        self.ReceiveId = ''
        self.unified_mode = unified_mode
        self.app = Quart(__name__)

        # Register the standalone route only when not in unified mode
        if not self.unified_mode:
            self.app.add_url_rule(
                '/callback/command',
                'handle_callback',
                self.handle_callback_request,
                methods=['POST', 'GET']
            )

        self._message_handlers = {
            'example': [],
        }
        self.logger = logger
        self.generated_content: dict[str, str] = {}
        self.msg_id_map: dict[str, int] = {}
        self.stream_sessions = StreamSessionManager(logger=logger)
        self.stream_poll_timeout = 0.5

    @staticmethod
    def _build_stream_payload(stream_id: str, content: str, finish: bool) -> dict[str, Any]:
        """Assemble the reply payload according to the WeCom protocol.

        Args:
            stream_id: The WeCom session ID.
            content: The text content to push.
            finish: Whether this is the final fragment.

        Returns:
            dict[str, Any]: A payload that can be encrypted and returned directly.

        Example:
            Builds a structure like `{'msgtype': 'stream', 'stream': {'id': 'sid', ...}}`.
        """
        return {
            'msgtype': 'stream',
            'stream': {
                'id': stream_id,
                'finish': finish,
                'content': content,
            },
        }

    async def _encrypt_and_reply(self, payload: dict[str, Any], nonce: str) -> tuple[Response, int]:
        """Encrypt and wrap the response before returning it to WeCom.

        Args:
            payload: The response content to be encrypted.
            nonce: The nonce from the WeCom callback parameters.

        Returns:
            Tuple[Response, int]: The Quart Response object and status code.

        Example:
            Called in both the initial and refresh scenarios to produce the encrypted response.
        """
        reply_plain_str = json.dumps(payload, ensure_ascii=False)
        reply_timestamp = str(int(time.time()))
        ret, encrypt_text = self.wxcpt.EncryptMsg(reply_plain_str, nonce, reply_timestamp)
        if ret != 0:
            await self.logger.error(f'Encryption failed: {ret}')
            return jsonify({'error': 'encrypt_failed'}), 500

        root = ET.fromstring(encrypt_text)
        encrypt = root.find('Encrypt').text
        resp = {
            'encrypt': encrypt,
        }
        return jsonify(resp), 200

    async def _dispatch_event(self, event: wecombotevent.WecomBotEvent) -> None:
        """Trigger pipeline processing asynchronously so the initial response is not blocked.

        Args:
            event: The internal event object converted from the WeCom message.
        """
        try:
            await self._handle_message(event)
        except Exception:
            await self.logger.error(traceback.format_exc())

    async def _handle_post_initial_response(self, msg_json: dict[str, Any], nonce: str) -> tuple[Response, int]:
        """Handle the first message pushed by WeCom: return a stream_id and start the pipeline.

        Args:
            msg_json: The decrypted WeCom message JSON.
            nonce: The nonce from the WeCom callback parameters.

        Returns:
            Tuple[Response, int]: The Quart Response and status code.

        Example:
            Called on the first callback; immediately returns a response carrying the `stream_id`.
        """
        session, is_new = self.stream_sessions.create_or_get(msg_json)

        message_data = await self.get_message(msg_json)
        if message_data:
            message_data['stream_id'] = session.stream_id
            try:
                event = wecombotevent.WecomBotEvent(message_data)
            except Exception:
                await self.logger.error(traceback.format_exc())
            else:
                if is_new:
                    asyncio.create_task(self._dispatch_event(event))

        payload = self._build_stream_payload(session.stream_id, '', False)
        return await self._encrypt_and_reply(payload, nonce)

    async def _handle_post_followup_response(self, msg_json: dict[str, Any], nonce: str) -> tuple[Response, int]:
        """Handle WeCom streaming refresh requests, returning incremental fragments on demand.

        Args:
            msg_json: The decrypted WeCom refresh request.
            nonce: The nonce from the WeCom callback parameters.

        Returns:
            Tuple[Response, int]: The Quart Response and status code.

        Example:
            Called for refresh requests; returns incremental fragments as they become available.
        """
        stream_info = msg_json.get('stream', {})
        stream_id = stream_info.get('id', '')
        if not stream_id:
            await self.logger.error('Refresh request is missing stream.id')
            return await self._encrypt_and_reply(self._build_stream_payload('', '', True), nonce)

        session = self.stream_sessions.get_session(stream_id)
        chunk = await self.stream_sessions.consume(stream_id, timeout=self.stream_poll_timeout)

        if not chunk:
            cached_content = None
            if session and session.msg_id:
                cached_content = self.generated_content.pop(session.msg_id, None)
            if cached_content is not None:
                chunk = StreamChunk(content=cached_content, is_final=True)
            else:
                payload = self._build_stream_payload(stream_id, '', False)
                return await self._encrypt_and_reply(payload, nonce)

        payload = self._build_stream_payload(stream_id, chunk.content, chunk.is_final)
        if chunk.is_final:
            self.stream_sessions.mark_finished(stream_id)
        return await self._encrypt_and_reply(payload, nonce)

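    # Note (editor's sketch, not in the original diff): the streaming exchange seen by WeCom is
    #   1. initial callback  -> _handle_post_initial_response returns {'msgtype': 'stream',
    #      'stream': {'id': <stream_id>, 'finish': False, 'content': ''}} and starts the pipeline;
    #   2. WeCom then re-posts with msgtype == 'stream' -> _handle_post_followup_response returns
    #      one queued increment per refresh (or an empty, unfinished payload if none is ready yet);
    #   3. the final chunk is sent with 'finish': True and the session is marked finished.
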
    async def handle_callback_request(self):
        """WeCom callback entry point (standalone-port mode, uses the global request).

        Returns:
            Quart Response: The verification, initial, or refresh result depending on the request type.

        Example:
            Registered and used directly as a Quart route handler.
        """
        return await self._handle_callback_internal(request)

    async def handle_unified_webhook(self, req):
        """Handle callback requests (unified webhook mode, request passed in explicitly).

        Args:
            req: Quart Request object

        Returns:
            Response data
        """
        return await self._handle_callback_internal(req)

    async def _handle_callback_internal(self, req):
        """Internal implementation of callback handling, covering GET verification and POST message reception.

        Args:
            req: Quart Request object
        """
        try:
            self.wxcpt = WXBizMsgCrypt(self.Token, self.EnCodingAESKey, '')
            await self.logger.info(f'{req.method} {req.url} {str(req.args)}')

            if req.method == 'GET':
                return await self._handle_get_callback(req)

            if req.method == 'POST':
                return await self._handle_post_callback(req)

            return Response('', status=405)

        except Exception:
            await self.logger.error(traceback.format_exc())
            return Response('Internal Server Error', status=500)

    async def _handle_get_callback(self, req) -> tuple[Response, int] | Response:
        """Handle WeCom GET verification requests."""

        msg_signature = unquote(req.args.get('msg_signature', ''))
        timestamp = unquote(req.args.get('timestamp', ''))
        nonce = unquote(req.args.get('nonce', ''))
        echostr = unquote(req.args.get('echostr', ''))

        if not all([msg_signature, timestamp, nonce, echostr]):
            await self.logger.error('Request parameters missing')
            return Response('Missing parameters', status=400)

        ret, decrypted_str = self.wxcpt.VerifyURL(msg_signature, timestamp, nonce, echostr)
        if ret != 0:
            await self.logger.error('URL verification failed')
            return Response('Verification failed', status=403)

        return Response(decrypted_str, mimetype='text/plain')

    async def _handle_post_callback(self, req) -> tuple[Response, int] | Response:
        """Handle WeCom POST callback requests."""

        self.stream_sessions.cleanup()

        msg_signature = unquote(req.args.get('msg_signature', ''))
        timestamp = unquote(req.args.get('timestamp', ''))
        nonce = unquote(req.args.get('nonce', ''))

        encrypted_json = await req.get_json()
        encrypted_msg = (encrypted_json or {}).get('encrypt', '')
        if not encrypted_msg:
            await self.logger.error("Request body is missing the 'encrypt' field")
            return Response('Bad Request', status=400)

        xml_post_data = f"<xml><Encrypt><![CDATA[{encrypted_msg}]]></Encrypt></xml>"
        ret, decrypted_xml = self.wxcpt.DecryptMsg(xml_post_data, msg_signature, timestamp, nonce)
        if ret != 0:
            await self.logger.error('Decryption failed')
            return Response('Decryption failed', status=400)

        msg_json = json.loads(decrypted_xml)

        if msg_json.get('msgtype') == 'stream':
            return await self._handle_post_followup_response(msg_json, nonce)

        return await self._handle_post_initial_response(msg_json, nonce)

    async def get_message(self, msg_json):
        message_data = {}

        if msg_json.get('chattype', '') == 'single':
            message_data['type'] = 'single'
        elif msg_json.get('chattype', '') == 'group':
            message_data['type'] = 'group'

        if msg_json.get('msgtype') == 'text':
            message_data['content'] = msg_json.get('text', {}).get('content')
        elif msg_json.get('msgtype') == 'image':
            picurl = msg_json.get('image', {}).get('url', '')
            base64 = await self.download_url_to_base64(picurl, self.EnCodingAESKey)
            message_data['picurl'] = base64
        elif msg_json.get('msgtype') == 'mixed':
            items = msg_json.get('mixed', {}).get('msg_item', [])
            texts = []
            picurl = None
            for item in items:
                if item.get('msgtype') == 'text':
                    texts.append(item.get('text', {}).get('content', ''))
                elif item.get('msgtype') == 'image' and picurl is None:
                    picurl = item.get('image', {}).get('url')

            if texts:
                message_data['content'] = "".join(texts)  # concatenate all text items
            if picurl:
                base64 = await self.download_url_to_base64(picurl, self.EnCodingAESKey)
                message_data['picurl'] = base64  # keep only the first image

        # Extract user information
        from_info = msg_json.get('from', {})
        message_data['userid'] = from_info.get('userid', '')
        message_data['username'] = from_info.get('alias', '') or from_info.get('name', '') or from_info.get('userid', '')

        # Extract chat/group information
        if msg_json.get('chattype', '') == 'group':
            message_data['chatid'] = msg_json.get('chatid', '')
            # Try to get group name if available
            message_data['chatname'] = msg_json.get('chatname', '') or msg_json.get('chatid', '')

        message_data['msgid'] = msg_json.get('msgid', '')

        if msg_json.get('aibotid'):
            message_data['aibotid'] = msg_json.get('aibotid', '')

        return message_data

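    # Note (editor's sketch, not in the original diff): for a single-chat text callback such as
    #   {'chattype': 'single', 'msgtype': 'text', 'text': {'content': 'hello'},
    #    'from': {'userid': 'alice', 'alias': 'Alice'}, 'msgid': 'm-1'}
    # get_message() above would return roughly
    #   {'type': 'single', 'content': 'hello', 'userid': 'alice', 'username': 'Alice', 'msgid': 'm-1'}.
    # The sample field values are hypothetical.
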
    async def _handle_message(self, event: wecombotevent.WecomBotEvent):
        """
        Handle a message event.
        """
        try:
            message_id = event.message_id
            if message_id in self.msg_id_map.keys():
                self.msg_id_map[message_id] += 1
                return
            self.msg_id_map[message_id] = 1
            msg_type = event.type
            if msg_type in self._message_handlers:
                for handler in self._message_handlers[msg_type]:
                    await handler(event)
        except Exception:
            print(traceback.format_exc())

    async def push_stream_chunk(self, msg_id: str, content: str, is_final: bool = False) -> bool:
        """Push a pipeline fragment into the stream session.

        Args:
            msg_id: The original WeCom message ID.
            content: The fragment content produced by the model.
            is_final: Whether this is the final fragment.

        Returns:
            bool: True when the fragment was written to the stream queue successfully.

        Example:
            Called from the pipeline's `reply_message_chunk` to push increments to WeCom.
        """
        # Look up the stream session by msg_id; if none exists, the current message is not streamed
        stream_id = self.stream_sessions.get_stream_id_by_msg(msg_id)
        if not stream_id:
            return False

        chunk = StreamChunk(content=content, is_final=is_final)
        await self.stream_sessions.publish(stream_id, chunk)
        if is_final:
            self.stream_sessions.mark_finished(stream_id)
        return True

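    # Note (editor's sketch, not in the original diff): a pipeline that streams model output could
    # forward each increment like this (names are hypothetical):
    #   for i, part in enumerate(parts):
    #       await client.push_stream_chunk(msg_id, part, is_final=(i == len(parts) - 1))
    # When push_stream_chunk returns False (no stream session), set_message below caches the
    # final text instead, and the next refresh drains it from generated_content.
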
    async def set_message(self, msg_id: str, content: str):
        """Compatibility with the old logic: cache the final result when it cannot be returned as a stream.

        Args:
            msg_id: The WeCom message ID.
            content: The text content of the final reply.

        Example:
            In non-streaming scenarios, caches the final result so it can be returned on the next refresh.
        """
        handled = await self.push_stream_chunk(msg_id, content, is_final=True)
        if not handled:
            self.generated_content[msg_id] = content

    def on_message(self, msg_type: str):
        def decorator(func: Callable[[wecombotevent.WecomBotEvent], None]):
            if msg_type not in self._message_handlers:
                self._message_handlers[msg_type] = []
            self._message_handlers[msg_type].append(func)
            return func

        return decorator

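    # Note (editor's sketch, not in the original diff): registering a handler with the decorator
    # above; 'single' matches the type assigned by get_message(), and the handler body is hypothetical:
    #   @client.on_message('single')
    #   async def handle_single(event: wecombotevent.WecomBotEvent) -> None:
    #       await client.push_stream_chunk(event.message_id, 'Got it!', is_final=True)
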
    async def download_url_to_base64(self, download_url, encoding_aes_key):
        async with httpx.AsyncClient() as client:
            response = await client.get(download_url)
            if response.status_code != 200:
                await self.logger.error(f'failed to get file: {response.text}')
                return None

            encrypted_bytes = response.content

            aes_key = base64.b64decode(encoding_aes_key + "=")  # pad the base64 key to full length
            iv = aes_key[:16]

            cipher = AES.new(aes_key, AES.MODE_CBC, iv)
            decrypted = cipher.decrypt(encrypted_bytes)

            pad_len = decrypted[-1]
            decrypted = decrypted[:-pad_len]

            if decrypted.startswith(b"\xff\xd8"):  # JPEG
                mime_type = "image/jpeg"
            elif decrypted.startswith(b"\x89PNG"):  # PNG
                mime_type = "image/png"
            elif decrypted.startswith((b"GIF87a", b"GIF89a")):  # GIF
                mime_type = "image/gif"
            elif decrypted.startswith(b"BM"):  # BMP
                mime_type = "image/bmp"
            elif decrypted.startswith(b"II*\x00") or decrypted.startswith(b"MM\x00*"):  # TIFF
                mime_type = "image/tiff"
            else:
                mime_type = "application/octet-stream"

            # convert to base64
            base64_str = base64.b64encode(decrypted).decode("utf-8")
            return f"data:{mime_type};base64,{base64_str}"

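    # Note (editor's sketch, not in the original diff): the decryption above assumes the WeCom
    # media convention also used by WXBizMsgCrypt: the 43-character EncodingAESKey plus a trailing
    # '=' decodes to a 32-byte AES key, its first 16 bytes serve as the CBC IV, and the trailing
    # padding byte count is stripped after decryption.
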
    async def run_task(self, host: str, port: int, *args, **kwargs):
        """
        Start the Quart application.
        """
        await self.app.run_task(host=host, port=port, *args, **kwargs)

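# Editor's sketch (not part of the original diff): wiring the client together in a standalone
# deployment. The logger is passed in because EventLogger's constructor is not shown here; the
# function name, port, and reply text are hypothetical.
async def _demo_run_bot(logger: EventLogger) -> None:
    client = WecomBotClient(Token='token', EnCodingAESKey='aeskey', Corpid='corp', logger=logger)

    @client.on_message('single')
    async def echo(event: wecombotevent.WecomBotEvent) -> None:
        # Stream the reply if a session exists, otherwise fall back to the cached final answer.
        await client.set_message(event.message_id, f'echo: {event.content}')

    await client.run_task(host='0.0.0.0', port=2290)
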
@@ -22,7 +22,21 @@ class WecomBotEvent(dict):
        """
        User id
        """
-        return self.get('from', {}).get('userid', '')
+        return self.get('from', {}).get('userid', '') or self.get('userid', '')

    @property
    def username(self) -> str:
        """
        User name
        """
        return self.get('username', '') or self.get('from', {}).get('alias', '') or self.get('from', {}).get('name', '') or self.userid

    @property
    def chatname(self) -> str:
        """
        Group name
        """
        return self.get('chatname', '') or str(self.chatid)

    @property
    def content(self) -> str: