Mirror of https://github.com/langbot-app/LangBot.git (synced 2025-11-25 19:37:36 +08:00)

Compare commits: v3.4.0 ... v3.4.1.6-p (68 commits)
The compare view lists the following 68 commits (SHA1 only; the author and date columns were empty in this capture):

ed719fd44e, 5dc6bed0d1, b1244a4d4e, 6aa325a4b1, 88a11561f9, fd30022065, 9486312737,
e37070a985, ffb98ecca2, 29bd69ef97, e46c9530cc, 7ddd303e2d, 66798a1d0f, bd05afdf14,
136e48f7ee, facb5f177a, 10ce31cc46, 3b4f3c516b, a1e3981ce4, 89f26781fe, 914292a80b,
8227e3299b, 07ca48d652, 243f45c7db, 12cfce3622, 535c4a8a11, 6606c671b2, 242f24840d,
486f636b2d, b293d7a7cd, f4fa0b42a6, 209e89712d, 3314a7a9e9, 793d64303e, 6642498f00,
32b400dcb1, 0dcd2d8179, 736f8b613c, 9e7d9a937d, 4767983279, e37f35d95a, ad1e609fb9,
f9bc4a5acd, 2b79185f6a, 840f638472, 908169a55e, dbf9f2398e, 2ea3ff0b5c, 91bf72c710,
baabb70622, 94ea64a6a9, f97896b2c7, 9027db8587, cd46e1c131, 59211191a4, a3ca7e82c7,
0094056def, a9f305a1c6, e8cc048901, 05da43f606, a81faa7d8e, 18ba7d1da7, 875adfcbaa,
6e9c213893, 753066ccb9, 8b36782c25, da9dde6bd2, 07f6e69b93

.github/ISSUE_TEMPLATE/bug-report.yml (vendored) — 27 changed lines

@@ -1,39 +1,38 @@
 name: 漏洞反馈
-description: 报错或漏洞请使用这个模板创建,不使用此模板创建的异常、漏洞相关issue将被直接关闭
+description: 报错或漏洞请使用这个模板创建,不使用此模板创建的异常、漏洞相关issue将被直接关闭。由于自己操作不当/不甚了解所用技术栈引起的网络连接问题恕无法解决,请勿提 issue。容器间网络连接问题,参考文档 https://docs.langbot.app/deploy/network-details.html
 title: "[Bug]: "
 labels: ["bug?"]
 body:
   - type: dropdown
     attributes:
       label: 消息平台适配器
-      description: "连接QQ使用的框架"
+      description: "接入的消息平台类型"
       options:
+        - 其他(或暂未使用)
         - Nakuru(go-cqhttp)
         - aiocqhttp(使用 OneBot 协议接入的)
         - qq-botpy(QQ官方API)
-        - yiri-mirai(Mirai)
-    validations:
-      required: false
-  - type: input
-    attributes:
-      label: 运行环境
-      description: 操作系统、系统架构、**Python版本**、**主机地理位置**
-      placeholder: 例如: CentOS x64 Python 3.10.3、Docker 的直接写 Docker 就行
     validations:
       required: true
   - type: input
     attributes:
-      label: LangBot 版本
-      description: LangBot (QChatGPT) 版本号
-      placeholder: 例如:v3.3.0,可以使用`!version`命令查看,或者到 pkg/utils/constants.py 查看
+      label: 运行环境
+      description: LangBot 版本、操作系统、系统架构、**Python版本**、**主机地理位置**
+      placeholder: 例如:v3.3.0、CentOS x64 Python 3.10.3、Docker 的系统直接写 Docker 就行
     validations:
       required: true
   - type: textarea
     attributes:
       label: 异常情况
       description: 完整描述异常情况,什么时候发生的、发生了什么。**请附带日志信息。**
     validations:
       required: true
+  - type: textarea
+    attributes:
+      label: 复现步骤
+      description: 如何重现这个问题,越详细越好
+    validations:
+      required: false
   - type: textarea
     attributes:
       label: 启用的插件

.github/workflows/build-dev-image.yaml (vendored) — 5 changed lines

@@ -7,9 +7,14 @@ on:
 jobs:
   build-dev-image:
     runs-on: ubuntu-latest
+    # 如果是tag则跳过
+    if: ${{ !startsWith(github.ref, 'refs/tags/') }}
     steps:
       - name: Checkout
         uses: actions/checkout@v2
+        with:
+          persist-credentials: false
+
       - name: Generate Tag
         id: generate_tag
         run: |

.github/workflows/build-docker-image.yml (vendored) — 3 changed lines

@@ -13,6 +13,9 @@ jobs:
     steps:
       - name: Checkout
         uses: actions/checkout@v2
+        with:
+          persist-credentials: false
+
       - name: judge has env GITHUB_REF # 如果没有GITHUB_REF环境变量,则把github.ref变量赋值给GITHUB_REF
         run: |
           if [ -z "$GITHUB_REF" ]; then

.github/workflows/build-release-artifacts.yaml (vendored) — 10 changed lines

@@ -12,6 +12,8 @@ jobs:
     steps:
      - name: Checkout
        uses: actions/checkout@v2
+       with:
+         persist-credentials: false

      - name: Check version
        id: check_version
@@ -50,3 +52,11 @@ jobs:
        with:
          name: langbot-${{ steps.check_version.outputs.version }}-all
          path: .
+
+     - name: Upload To Release
+       env:
+         GH_TOKEN: ${{ secrets.RELEASE_UPLOAD_GITHUB_TOKEN }}
+       run: |
+         # 本目录下所有文件打包成zip
+         zip -r langbot-${{ steps.check_version.outputs.version }}-all.zip .
+         gh release upload ${{ github.event.release.tag_name }} langbot-${{ steps.check_version.outputs.version }}-all.zip

.github/workflows/test-pr.yml (vendored) — 80 changed lines (file removed)

@@ -1,80 +0,0 @@
-name: Test Pull Request
-
-on:
-  pull_request:
-    types: [ready_for_review]
-    paths:
-      # 任何py文件改动都会触发
-      - '**.py'
-  pull_request_review:
-    types: [submitted]
-  issue_comment:
-    types: [created]
-  # 允许手动触发
-  workflow_dispatch:
-
-jobs:
-  perform-test:
-    runs-on: ubuntu-latest
-    # 如果事件为pull_request_review且review状态为approved,则执行
-    if: >
-      github.event_name == 'pull_request' ||
-      (github.event_name == 'pull_request_review' && github.event.review.state == 'APPROVED') ||
-      github.event_name == 'workflow_dispatch' ||
-      (github.event_name == 'issue_comment' && github.event.issue.pull_request != '' && contains(github.event.comment.body, '/test') && github.event.comment.user.login == 'RockChinQ')
-    steps:
-      # 签出测试工程仓库代码
-      - name: Checkout
-        uses: actions/checkout@v2
-        with:
-          # 仓库地址
-          repository: RockChinQ/qcg-tester
-          # 仓库路径
-          path: qcg-tester
-      - name: Setup Python
-        uses: actions/setup-python@v2
-        with:
-          python-version: '3.10'
-
-      - name: Install dependencies
-        run: |
-          cd qcg-tester
-          python -m pip install --upgrade pip
-          pip install -r requirements.txt
-
-      - name: Get PR details
-        id: get-pr
-        if: github.event_name == 'issue_comment'
-        uses: octokit/request-action@v2.x
-        with:
-          route: GET /repos/${{ github.repository }}/pulls/${{ github.event.issue.number }}
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-
-      - name: Set PR source branch as env variable
-        if: github.event_name == 'issue_comment'
-        run: |
-          PR_SOURCE_BRANCH=$(echo '${{ steps.get-pr.outputs.data }}' | jq -r '.head.ref')
-          echo "BRANCH=$PR_SOURCE_BRANCH" >> $GITHUB_ENV
-
-      - name: Set PR Branch as bash env
-        if: github.event_name != 'issue_comment'
-        run: |
-          echo "BRANCH=${{ github.head_ref }}" >> $GITHUB_ENV
-      - name: Set OpenAI API Key from Secrets
-        run: |
-          echo "OPENAI_API_KEY=${{ secrets.OPENAI_API_KEY }}" >> $GITHUB_ENV
-      - name: Set OpenAI Reverse Proxy URL from Secrets
-        run: |
-          echo "OPENAI_REVERSE_PROXY=${{ secrets.OPENAI_REVERSE_PROXY }}" >> $GITHUB_ENV
-      - name: Run test
-        run: |
-          cd qcg-tester
-          python main.py
-
-      - name: Upload coverage reports to Codecov
-        run: |
-          cd qcg-tester/resource/QChatGPT
-          curl -Os https://uploader.codecov.io/latest/linux/codecov
-          chmod +x codecov
-          ./codecov -t ${{ secrets.CODECOV_TOKEN }}

.gitignore (vendored) — 6 changed lines

@@ -2,7 +2,6 @@
 .idea/
 __pycache__/
 database.db
-qchatgpt.log
 langbot.log
 /banlist.py
 /plugins/
@@ -17,8 +16,7 @@ scenario/
 !scenario/default-template.json
 override.json
 cookies.json
-res/announcement_saved
-res/announcement_saved.json
+data/labels/announcement_saved.json
 cmdpriv.json
 tips.py
 .venv
@@ -32,7 +30,7 @@ claude.json
 bard.json
 /*yaml
 !/docker-compose.yaml
-res/instance_id.json
+data/labels/instance_id.json
 .DS_Store
 /data
 botpy.log*

README.md — 100 changed lines

@@ -1,33 +1,16 @@
-> [!IMPORTANT]
-> 我们被人在 X.com 和 pump.fun 上冒充了,以下两个账号利用本项目和作者信息在 X.com 上发布数字货币营销信息,请勿相信!我们已向 X 官方举报!我们从未以 LangBot 名义创建任何社交媒体账号或者数字货币。
-> We have been impersonated on X.com and pump.fun . The following two accounts are using this project and author information to post digital currency marketing information on X.com. Please do not believe that! We have reported to X official! We have never created any social media account or digital currency under the name LangBot.
-> 1. https://x.com/RockChinQ
-> 2. https://x.com/LangBotAI
-
 <p align="center">
-<img src="https://docs.langbot.app/chrome-512.png" alt="QChatGPT" width="180" />
-</p>
+<img src="https://docs.langbot.app/social.png" alt="LangBot"/>
 <div align="center">
 
-# LangBot
 
 <a href="https://trendshift.io/repositories/6187" target="_blank"><img src="https://trendshift.io/api/badge/repositories/6187" alt="RockChinQ%2FQChatGPT | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a>
 
-[](https://github.com/RockChinQ/LangBot/releases/latest)
-<a href="https://hub.docker.com/repository/docker/rockchin/langbot">
-<img src="https://img.shields.io/docker/pulls/rockchin/langbot?color=blue" alt="docker pull">
-</a>
-
-
-<br/>
-<img src="https://img.shields.io/badge/python-3.10 | 3.11 | 3.12-blue.svg" alt="python">
-<a href="http://qm.qq.com/cgi-bin/qm/qr?_wv=1027&k=66-aWvn8cbP4c1ut_1YYkvvGVeEtyTH8&authKey=pTaKBK5C%2B8dFzQ4XlENf6MHTCLaHnlKcCRx7c14EeVVlpX2nRSaS8lJm8YeM4mCU&noverify=0&group_code=195992197">
-<img alt="Static Badge" src="https://img.shields.io/badge/%E5%AE%98%E6%96%B9%E7%BE%A4-195992197-purple">
-</a>
-<a href="https://qm.qq.com/q/PClALFK242">
-<img alt="Static Badge" src="https://img.shields.io/badge/%E7%A4%BE%E5%8C%BA%E7%BE%A4-619154800-purple">
-</a>
-<a href="https://codecov.io/gh/RockChinQ/QChatGPT" >
-<img src="https://codecov.io/gh/RockChinQ/QChatGPT/graph/badge.svg?token=pjxYIL2kbC"/>
-</a>
-
-## 使用文档
-
 <a href="https://docs.langbot.app">项目主页</a> |
 <a href="https://docs.langbot.app/insight/intro.htmll">功能介绍</a> |
 <a href="https://docs.langbot.app/insight/guide.html">部署文档</a> |
@@ -35,12 +18,67 @@
 <a href="https://docs.langbot.app/plugin/plugin-intro.html">插件介绍</a> |
 <a href="https://github.com/RockChinQ/LangBot/issues/new?assignees=&labels=%E7%8B%AC%E7%AB%8B%E6%8F%92%E4%BB%B6&projects=&template=submit-plugin.yml&title=%5BPlugin%5D%3A+%E8%AF%B7%E6%B1%82%E7%99%BB%E8%AE%B0%E6%96%B0%E6%8F%92%E4%BB%B6">提交插件</a>
 
-## 相关链接
-
-<a href="https://github.com/RockChinQ/qcg-installer">安装器源码</a> |
-<a href="https://github.com/RockChinQ/qcg-tester">测试工程源码</a> |
-<a href="https://github.com/RockChinQ/qcg-center">遥测服务端源码</a> |
-<a href="https://github.com/the-lazy-me/QChatGPT-Wiki">官方文档储存库</a>
-
-<img alt="回复效果(带有联网插件)" src="https://docs.langbot.top/QChatGPT-0516.png" width="500px"/>
+<div align="center">
+😎高稳定、🧩支持扩展、🦄多模态 - 基于大语言模型的即时通讯机器人平台🤖
 </div>
 
+<br/>
+
+<a href="http://qm.qq.com/cgi-bin/qm/qr?_wv=1027&k=66-aWvn8cbP4c1ut_1YYkvvGVeEtyTH8&authKey=pTaKBK5C%2B8dFzQ4XlENf6MHTCLaHnlKcCRx7c14EeVVlpX2nRSaS8lJm8YeM4mCU&noverify=0&group_code=195992197">
+<img alt="Static Badge" src="https://img.shields.io/badge/%E5%AE%98%E6%96%B9%E7%BE%A4-195992197-green">
+</a>
+<a href="https://qm.qq.com/q/PClALFK242">
+<img alt="Static Badge" src="https://img.shields.io/badge/%E7%A4%BE%E5%8C%BA%E7%BE%A4-619154800-green">
+</a>
+<br/>
+
+[](https://github.com/RockChinQ/LangBot/releases/latest)
+
+<img src="https://img.shields.io/badge/python-3.10 | 3.11 | 3.12-blue.svg" alt="python">
+</div>
+
+</p>
+
+## ✨ Features
+
+- 💬 大模型对话、Agent:支持多种大模型,适配群聊和私聊;具有多轮对话、工具调用、多模态能力,并深度适配 [Dify](https://dify.ai)。目前支持 QQ、QQ频道,后续还将支持微信、WhatsApp、Discord等平台。
+- 🛠️ 高稳定性、功能完备:原生支持访问控制、限速、敏感词过滤等机制;配置简单,支持多种部署方式。
+- 🧩 插件扩展、活跃社区:支持事件驱动、组件扩展等插件机制;丰富生态,目前已有数十个[插件](https://docs.langbot.app/plugin/plugin-intro.html)
+- 😻 [New] Web 管理面板:支持通过浏览器管理 LangBot 实例,具体支持功能,查看[文档](https://docs.langbot.app/webui/intro.html)
+
+## 📦 开始使用
+
+> [!IMPORTANT]
+>
+> 在您开始任何方式部署之前,请务必阅读[新手指引](https://docs.langbot.app/insight/guide.html)。
+
+#### Docker Compose 部署
+
+适合熟悉 Docker 的用户,查看文档[Docker 部署](https://docs.langbot.app/deploy/langbot/docker.html)。
+
+#### 宝塔面板部署
+
+已上架宝塔面板,若您已安装宝塔面板,可以根据[文档](https://docs.langbot.app/deploy/langbot/one-click/bt.html)使用。
+
+#### Zeabur 云部署
+
+社区贡献的 Zeabur 模板。
+
+[](https://zeabur.com/zh-CN/templates/ZKTBDH)
+
+#### Railway 云部署
+
+[](https://railway.app/template/yRrAyL?referralCode=vogKPF)
+
+#### 手动部署
+
+直接使用发行版运行,查看文档[手动部署](https://docs.langbot.app/deploy/langbot/manual.html)。
+
+## 📸 效果展示
+
+<img alt="回复效果(带有联网插件)" src="https://docs.langbot.app/QChatGPT-0516.png" width="500px"/>
+
+- WebUI Demo: https://demo.langbot.dev/
+- 登录信息:邮箱:`demo@langbot.app` 密码:`langbot123456`
+- 注意:仅展示webui效果,公开环境,请不要在其中填入您的任何敏感信息。

docker-compose.yaml (file name inferred from the hunk context) — 1 changed line

@@ -3,6 +3,7 @@ version: "3"
 services:
   langbot:
     image: rockchin/langbot:latest
+    container_name: langbot
     volumes:
       - ./data:/app/data
      - ./plugins:/app/plugins

libs/LICENSE (new file, 674 lines)

@@ -0,0 +1,674 @@
+                    GNU GENERAL PUBLIC LICENSE
+                       Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+ [... the remaining lines add the standard, unmodified text of the GNU General Public License version 3: the Preamble, Terms and Conditions sections 0–17, "END OF TERMS AND CONDITIONS", and "How to Apply These Terms to Your New Programs", ending with the pointer to <https://www.gnu.org/licenses/why-not-lgpl.html>.]

 4  libs/README.md  Normal file
@@ -0,0 +1,4 @@
# LangBot/libs

LangBot 项目下的 libs 目录下的所有代码均遵循本目录下的许可证约束。
您在使用、修改、分发本目录下的代码时,需要遵守其中包含的条款。

 3  libs/dify_service_api/README.md  Normal file
@@ -0,0 +1,3 @@
# Dify Service API Python SDK

这个 SDK 尚不完全支持 Dify Service API 的所有功能。

 2  libs/dify_service_api/__init__.py  Normal file
@@ -0,0 +1,2 @@
from .v1 import client
from .v1 import errors

 44  libs/dify_service_api/test.py  Normal file
@@ -0,0 +1,44 @@
from v1 import client

import asyncio

import os
import json


class TestDifyClient:
    async def test_chat_messages(self):
        cln = client.AsyncDifyServiceClient(api_key=os.getenv("DIFY_API_KEY"), base_url=os.getenv("DIFY_BASE_URL"))

        async for chunk in cln.chat_messages(inputs={}, query="调用工具查看现在几点?", user="test"):
            print(json.dumps(chunk, ensure_ascii=False, indent=4))

    async def test_upload_file(self):
        cln = client.AsyncDifyServiceClient(api_key=os.getenv("DIFY_API_KEY"), base_url=os.getenv("DIFY_BASE_URL"))

        file_bytes = open("img.png", "rb").read()

        print(type(file_bytes))

        file = ("img2.png", file_bytes, "image/png")

        resp = await cln.upload_file(file=file, user="test")
        print(json.dumps(resp, ensure_ascii=False, indent=4))

    async def test_workflow_run(self):
        cln = client.AsyncDifyServiceClient(api_key=os.getenv("DIFY_API_KEY"), base_url=os.getenv("DIFY_BASE_URL"))

        # resp = await cln.workflow_run(inputs={}, user="test")
        # # print(json.dumps(resp, ensure_ascii=False, indent=4))
        # print(resp)
        chunks = []

        ignored_events = ['text_chunk']
        async for chunk in cln.workflow_run(inputs={}, user="test"):
            if chunk['event'] in ignored_events:
                continue
            chunks.append(chunk)
        print(json.dumps(chunks, ensure_ascii=False, indent=4))


if __name__ == "__main__":
    asyncio.run(TestDifyClient().test_chat_messages())

 126  libs/dify_service_api/v1/client.py  Normal file
@@ -0,0 +1,126 @@
from __future__ import annotations

import httpx
import typing
import json

from .errors import DifyAPIError


class AsyncDifyServiceClient:
    """Dify Service API 客户端"""

    api_key: str
    base_url: str

    def __init__(
        self,
        api_key: str,
        base_url: str = "https://api.dify.ai/v1",
    ) -> None:
        self.api_key = api_key
        self.base_url = base_url

    async def chat_messages(
        self,
        inputs: dict[str, typing.Any],
        query: str,
        user: str,
        response_mode: str = "streaming",  # 当前不支持 blocking
        conversation_id: str = "",
        files: list[dict[str, typing.Any]] = [],
        timeout: float = 30.0,
    ) -> typing.AsyncGenerator[dict[str, typing.Any], None]:
        """发送消息"""
        if response_mode != "streaming":
            raise DifyAPIError("当前仅支持 streaming 模式")

        async with httpx.AsyncClient(
            base_url=self.base_url,
            trust_env=True,
            timeout=timeout,
        ) as client:
            async with client.stream(
                "POST",
                "/chat-messages",
                headers={"Authorization": f"Bearer {self.api_key}", "Content-Type": "application/json"},
                json={
                    "inputs": inputs,
                    "query": query,
                    "user": user,
                    "response_mode": response_mode,
                    "conversation_id": conversation_id,
                    "files": files,
                },
            ) as r:
                async for chunk in r.aiter_lines():
                    if r.status_code != 200:
                        raise DifyAPIError(f"{r.status_code} {chunk}")
                    if chunk.strip() == "":
                        continue
                    if chunk.startswith("data:"):
                        yield json.loads(chunk[5:])

    async def workflow_run(
        self,
        inputs: dict[str, typing.Any],
        user: str,
        response_mode: str = "streaming",  # 当前不支持 blocking
        files: list[dict[str, typing.Any]] = [],
        timeout: float = 30.0,
    ) -> typing.AsyncGenerator[dict[str, typing.Any], None]:
        """运行工作流"""
        if response_mode != "streaming":
            raise DifyAPIError("当前仅支持 streaming 模式")

        async with httpx.AsyncClient(
            base_url=self.base_url,
            trust_env=True,
            timeout=timeout,
        ) as client:

            async with client.stream(
                "POST",
                "/workflows/run",
                headers={"Authorization": f"Bearer {self.api_key}", "Content-Type": "application/json"},
                json={
                    "inputs": inputs,
                    "user": user,
                    "response_mode": response_mode,
                    "files": files,
                },
            ) as r:
                async for chunk in r.aiter_lines():
                    if r.status_code != 200:
                        raise DifyAPIError(f"{r.status_code} {chunk}")
                    if chunk.strip() == "":
                        continue
                    if chunk.startswith("data:"):
                        yield json.loads(chunk[5:])

    async def upload_file(
        self,
        file: httpx._types.FileTypes,
        user: str,
        timeout: float = 30.0,
    ) -> str:
        """上传文件"""
        async with httpx.AsyncClient(
            base_url=self.base_url,
            trust_env=True,
            timeout=timeout,
        ) as client:
            # multipart/form-data
            response = await client.post(
                "/files/upload",
                headers={"Authorization": f"Bearer {self.api_key}"},
                files={
                    "file": file,
                    "user": (None, user),
                },
            )

            if response.status_code != 201:
                raise DifyAPIError(f"{response.status_code} {response.text}")

            return response.json()
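
A minimal usage sketch of the AsyncDifyServiceClient added above — essentially what libs/dify_service_api/test.py does. It assumes DIFY_API_KEY and DIFY_BASE_URL are set in the environment and the repository root is on sys.path; chat_messages is an async generator that yields the already-parsed "data:" SSE events as dicts (the "message"/"answer" fields shown are Dify streaming-event conventions, not something this SDK adds):

    import asyncio
    import os

    from libs.dify_service_api.v1 import client

    async def demo():
        cln = client.AsyncDifyServiceClient(
            api_key=os.getenv("DIFY_API_KEY"),
            base_url=os.getenv("DIFY_BASE_URL", "https://api.dify.ai/v1"),
        )
        # each chunk is one decoded "data:" event from the streaming response
        async for chunk in cln.chat_messages(inputs={}, query="你好", user="demo-user"):
            if chunk.get("event") == "message":
                print(chunk.get("answer", ""), end="")

    asyncio.run(demo())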

 17  libs/dify_service_api/v1/client_test.py  Normal file
@@ -0,0 +1,17 @@
from . import client

import asyncio

import os


class TestDifyClient:
    async def test_chat_messages(self):
        cln = client.DifyClient(api_key=os.getenv("DIFY_API_KEY"))

        resp = await cln.chat_messages(inputs={}, query="Who are you?", user_id="test")
        print(resp)


if __name__ == "__main__":
    asyncio.run(TestDifyClient().test_chat_messages())

 6  libs/dify_service_api/v1/errors.py  Normal file
@@ -0,0 +1,6 @@
class DifyAPIError(Exception):
    """Dify API 请求失败"""

    def __init__(self, message: str):
        self.message = message
        super().__init__(self.message)

 10  main.py
@@ -36,6 +36,12 @@ async def main_entry(loop: asyncio.AbstractEventLoop):
         print("已自动安装缺失的依赖包,请重启程序。")
         sys.exit(0)
 
+    # 检查pydantic版本,如果没有 pydantic.v1,则把 pydantic 映射为 v1
+    import pydantic.version
+    if pydantic.version.VERSION < '2.0':
+        import pydantic
+        sys.modules['pydantic.v1'] = pydantic
+
     # 检查配置文件
 
     from pkg.core.bootutils import files
@@ -43,12 +49,10 @@ async def main_entry(loop: asyncio.AbstractEventLoop):
     generated_files = await files.generate_files()
 
     if generated_files:
-        print("以下文件不存在,已自动生成,请按需修改配置文件后重启:")
+        print("以下文件不存在,已自动生成:")
        for file in generated_files:
             print("-", file)
 
-        sys.exit(0)
-
     from pkg.core import boot
     await boot.main(loop)
 
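
The reason for the shim added to main.py above: the rest of this changeset switches model classes to `import pydantic.v1 as pydantic`. On pydantic 2.x that path is the bundled v1 compatibility API; on a host that still has pydantic 1.x installed there is no `pydantic.v1` module, so main.py registers the top-level package under that name before anything else imports it. A standalone sketch of the same idea (illustrative script, not LangBot code):

    import sys
    import pydantic.version

    # on pydantic 1.x, make `import pydantic.v1` resolve to the top-level package
    if pydantic.version.VERSION < '2.0':
        import pydantic
        sys.modules['pydantic.v1'] = pydantic

    import pydantic.v1 as pydantic_v1  # works on both 1.x and 2.x after the shim

    class User(pydantic_v1.BaseModel):
        name: str

    print(User(name="test"))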

@@ -1,5 +1,6 @@
 import quart
 import sqlalchemy
+import argon2
 
 from .. import group
 from .....persistence.entities import user
@@ -32,7 +33,10 @@ class UserRouterGroup(group.RouterGroup):
         async def _() -> str:
             json_data = await quart.request.json
 
-            token = await self.ap.user_service.authenticate(json_data['user'], json_data['password'])
+            try:
+                token = await self.ap.user_service.authenticate(json_data['user'], json_data['password'])
+            except argon2.exceptions.VerifyMismatchError:
+                return self.fail(1, '用户名或密码错误')
 
             return self.success(data={
                 'token': token
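
The router hunk above starts catching argon2.exceptions.VerifyMismatchError because of the UserService change that follows: argon2-cffi's PasswordHasher.verify() does not return False on a wrong password, it raises, so the old `if not ph.verify(...)` branch could never fire. A small standalone illustration of that behaviour (sketch, not LangBot code):

    import argon2

    ph = argon2.PasswordHasher()
    stored_hash = ph.hash("correct-password")

    try:
        ph.verify(stored_hash, "wrong-password")
    except argon2.exceptions.VerifyMismatchError:
        print("password mismatch")  # the path the router now maps to a 'fail' response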
@@ -51,8 +51,7 @@ class UserService:
 
         ph = argon2.PasswordHasher()
 
-        if not ph.verify(user_obj.password, password):
-            raise ValueError('密码错误')
+        ph.verify(user_obj.password, password)
 
         return await self.generate_jwt_token(user_email)
 

@@ -13,14 +13,14 @@ identifier = {
     'instance_create_ts': 0,
 }
 
-HOST_ID_FILE = os.path.expanduser('~/.qchatgpt/host_id.json')
-INSTANCE_ID_FILE = 'res/instance_id.json'
+HOST_ID_FILE = os.path.expanduser('~/.langbot/host_id.json')
+INSTANCE_ID_FILE = 'data/labels/instance_id.json'
 
 def init():
     global identifier
 
-    if not os.path.exists(os.path.expanduser('~/.qchatgpt')):
-        os.mkdir(os.path.expanduser('~/.qchatgpt'))
+    if not os.path.exists(os.path.expanduser('~/.langbot')):
+        os.mkdir(os.path.expanduser('~/.langbot'))
 
     if not os.path.exists(HOST_ID_FILE):
         new_host_id = 'host_'+str(uuid.uuid4())

@@ -2,7 +2,7 @@ from __future__ import annotations
 
 import typing
 
-import pydantic
+import pydantic.v1 as pydantic
 
 from ..core import app, entities as core_entities
 from . import errors, operator

@@ -143,9 +143,7 @@ class Application:
             self.logger.warning("WebUI 文件缺失,请根据文档获取:https://docs.langbot.app/webui/intro.html")
             return
 
-        import socket
-
-        host_ip = socket.gethostbyname(socket.gethostname())
+        host_ip = "127.0.0.1"
 
         public_ip = await ip.get_myip()
 

@@ -24,6 +24,7 @@ required_paths = [
     "data/scenario",
     "data/logs",
     "data/config",
+    "data/labels",
     "plugins"
 ]
 

@@ -42,8 +42,11 @@ async def init_logging(extra_handlers: list[logging.Handler] = None) -> logging.
     )
 
     stream_handler = logging.StreamHandler(sys.stdout)
+    # stream_handler.setLevel(level)
+    # stream_handler.setFormatter(color_formatter)
+    stream_handler.stream = open(sys.stdout.fileno(), mode='w', encoding='utf-8', buffering=1)
 
-    log_handlers: list[logging.Handler] = [stream_handler, logging.FileHandler(log_file_name)]
+    log_handlers: list[logging.Handler] = [stream_handler, logging.FileHandler(log_file_name, encoding='utf-8')]
     log_handlers += extra_handlers if extra_handlers is not None else []
 
     for handler in log_handlers:

@@ -5,7 +5,7 @@ import typing
 import datetime
 import asyncio
 
-import pydantic
+import pydantic.v1 as pydantic
 
 from ..provider import entities as llm_entities
 from ..provider.modelmgr import entities
@@ -91,7 +91,7 @@ class Query(pydantic.BaseModel):
 
 
 class Conversation(pydantic.BaseModel):
     """对话,包含于 Session 中,一个 Session 可以有多个历史 Conversation,但只有一个当前使用的 Conversation"""
 
     prompt: sysprompt_entities.Prompt
 
@@ -105,6 +105,9 @@ class Conversation(pydantic.BaseModel):
 
     use_funcs: typing.Optional[list[tools_entities.LLMFunction]]
 
+    uuid: typing.Optional[str] = None
+    """该对话的 uuid,在创建时不会自动生成。而是当使用 Dify API 等由外部管理对话信息的服务时,用于绑定外部的会话。具体如何使用,取决于 Runner。"""
+
 
 class Session(pydantic.BaseModel):
     """会话,一个 Session 对应一个 {launcher_type.value}_{launcher_id}"""

 26  pkg/core/migrations/m015_gitee_ai_config.py  Normal file
@@ -0,0 +1,26 @@
from __future__ import annotations

from .. import migration


@migration.migration_class("gitee-ai-config", 15)
class GiteeAIConfigMigration(migration.Migration):
    """迁移"""

    async def need_migrate(self) -> bool:
        """判断当前环境是否需要运行此迁移"""
        return 'gitee-ai-chat-completions' not in self.ap.provider_cfg.data['requester'] or 'gitee-ai' not in self.ap.provider_cfg.data['keys']

    async def run(self):
        """执行迁移"""
        self.ap.provider_cfg.data['requester']['gitee-ai-chat-completions'] = {
            "base-url": "https://ai.gitee.com/v1",
            "args": {},
            "timeout": 120
        }

        self.ap.provider_cfg.data['keys']['gitee-ai'] = [
            "XXXXX"
        ]

        await self.ap.provider_cfg.dump_config()

 28  pkg/core/migrations/m016_dify_service_api.py  Normal file
@@ -0,0 +1,28 @@
from __future__ import annotations

from .. import migration


@migration.migration_class("dify-service-api-config", 16)
class DifyServiceAPICfgMigration(migration.Migration):
    """迁移"""

    async def need_migrate(self) -> bool:
        """判断当前环境是否需要运行此迁移"""
        return 'dify-service-api' not in self.ap.provider_cfg.data

    async def run(self):
        """执行迁移"""
        self.ap.provider_cfg.data['dify-service-api'] = {
            "base-url": "https://api.dify.ai/v1",
            "app-type": "chat",
            "chat": {
                "api-key": "app-1234567890"
            },
            "workflow": {
                "api-key": "app-1234567890",
                "output-key": "summary"
            }
        }

        await self.ap.provider_cfg.dump_config()

 24  pkg/core/migrations/m017_dify_api_timeout_params.py  Normal file
@@ -0,0 +1,24 @@
from __future__ import annotations

from .. import migration


@migration.migration_class("dify-api-timeout-params", 17)
class DifyAPITimeoutParamsMigration(migration.Migration):
    """迁移"""

    async def need_migrate(self) -> bool:
        """判断当前环境是否需要运行此迁移"""
        return 'timeout' not in self.ap.provider_cfg.data['dify-service-api']['chat'] or 'timeout' not in self.ap.provider_cfg.data['dify-service-api']['workflow'] \
            or 'agent' not in self.ap.provider_cfg.data['dify-service-api']

    async def run(self):
        """执行迁移"""
        self.ap.provider_cfg.data['dify-service-api']['chat']['timeout'] = 120
        self.ap.provider_cfg.data['dify-service-api']['workflow']['timeout'] = 120
        self.ap.provider_cfg.data['dify-service-api']['agent'] = {
            "api-key": "app-1234567890",
            "timeout": 120
        }

        await self.ap.provider_cfg.dump_config()
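
After migrations m015–m017 above have run, the provider config file (data/config/provider.json in a default install) ends up with sections roughly like the following; the values shown are just the placeholders the migrations write, and real deployments replace the api-key fields:

    "keys": {
        "gitee-ai": ["XXXXX"]
    },
    "requester": {
        "gitee-ai-chat-completions": {
            "base-url": "https://ai.gitee.com/v1",
            "args": {},
            "timeout": 120
        }
    },
    "dify-service-api": {
        "base-url": "https://api.dify.ai/v1",
        "app-type": "chat",
        "chat": {"api-key": "app-1234567890", "timeout": 120},
        "agent": {"api-key": "app-1234567890", "timeout": 120},
        "workflow": {"api-key": "app-1234567890", "output-key": "summary", "timeout": 120}
    }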
@@ -7,6 +7,7 @@ from .. import migration
 from ..migrations import m001_sensitive_word_migration, m002_openai_config_migration, m003_anthropic_requester_cfg_completion, m004_moonshot_cfg_completion
 from ..migrations import m005_deepseek_cfg_completion, m006_vision_config, m007_qcg_center_url, m008_ad_fixwin_config_migrate, m009_msg_truncator_cfg
 from ..migrations import m010_ollama_requester_config, m011_command_prefix_config, m012_runner_config, m013_http_api_config, m014_force_delay_config
+from ..migrations import m015_gitee_ai_config, m016_dify_service_api, m017_dify_api_timeout_params
 
 
 @stage.stage_class("MigrationStage")
@@ -28,3 +29,4 @@ class MigrationStage(stage.BootingStage):
 
             if await migration_instance.need_migrate():
                 await migration_instance.run()
+                print(f'已执行迁移 {migration_instance.name}')

@@ -2,7 +2,7 @@
 import typing
 import enum
 
-import pydantic
+import pydantic.v1 as pydantic
 
 from ...provider import entities as llm_entities
 

@@ -3,7 +3,7 @@ from __future__ import annotations
 import enum
 import typing
 
-import pydantic
+import pydantic.v1 as pydantic
 from ..platform.types import message as platform_message
 
 from ..core import entities

@@ -2,7 +2,7 @@
 from __future__ import annotations
 import typing
 
-import pydantic
+import pydantic.v1 as pydantic
 
 from .. import strategy as strategy_model
 from ....core import entities as core_entities
@@ -58,7 +58,7 @@ class Text2ImageStrategy(strategy_model.LongTextStrategy):
         """
         kv = []
         nums = []
-        beforeDatas = re.findall('[\d]+', path)
+        beforeDatas = re.findall('[\\d]+', path)
         for num in beforeDatas:
             indexV = []
             times = path.count(num)
@@ -45,7 +45,7 @@ class PreProcessor(stage.PipelineStage):
 
 
         # 检查vision是否启用,没启用就删除所有图片
-        if not self.ap.provider_cfg.data['enable-vision'] or not query.use_model.vision_supported:
+        if not self.ap.provider_cfg.data['enable-vision'] or (self.ap.provider_cfg.data['runner'] == 'local-agent' and not query.use_model.vision_supported):
             for msg in query.messages:
                 if isinstance(msg.content, list):
                     for me in msg.content:
@@ -60,13 +60,13 @@ class PreProcessor(stage.PipelineStage):
                         llm_entities.ContentElement.from_text(me.text)
                     )
                 elif isinstance(me, platform_message.Image):
-                    if self.ap.provider_cfg.data['enable-vision'] and query.use_model.vision_supported:
-                        if me.url is not None:
+                    if self.ap.provider_cfg.data['enable-vision'] and (self.ap.provider_cfg.data['runner'] != 'local-agent' or query.use_model.vision_supported):
+                        if me.base64 is not None:
                             content_list.append(
-                                llm_entities.ContentElement.from_image_url(str(me.url))
+                                llm_entities.ContentElement.from_image_base64(me.base64)
                             )
 
-        query.user_message = llm_entities.Message(  # TODO 适配多模态输入
+        query.user_message = llm_entities.Message(
             role='user',
             content=content_list
         )
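
The two PreProcessor hunks above change the vision gate: images are now dropped only when vision is disabled globally, or when the runner is 'local-agent' and the selected model itself cannot handle images — other runners (such as the Dify runner this release targets) receive the image content regardless of the local model flag, and the image now travels as a base64 data URI instead of a QQ URL. A condensed sketch of that decision, with cfg standing in for the provider config data and model for the resolved model info (names are illustrative, not LangBot APIs):

    def should_keep_images(cfg: dict, model) -> bool:
        if not cfg['enable-vision']:
            return False
        # only the local-agent runner depends on the model's own vision capability
        return cfg['runner'] != 'local-agent' or model.vision_supported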
@@ -91,7 +91,7 @@ class ChatMessageHandler(handler.MessageHandler):
             query.session.using_conversation.messages.extend(query.resp_messages)
         except Exception as e:
 
-            self.ap.logger.error(f'对话({query.query_id})请求失败: {str(e)}')
+            self.ap.logger.error(f'对话({query.query_id})请求失败: {type(e).__name__} {str(e)}')
 
             yield entities.StageProcessResult(
                 result_type=entities.ResultType.INTERRUPT,
@@ -105,7 +105,7 @@ class ChatMessageHandler(handler.MessageHandler):
                 await self.ap.ctr_mgr.usage.post_query_record(
                     session_type=query.session.launcher_type.value,
                     session_id=str(query.session.launcher_id),
-                    query_ability_provider="QChatGPT.Chat",
+                    query_ability_provider="LangBot.Chat",
                     usage=text_length,
                     model_name=query.use_model.name,
                     response_seconds=int(time.time() - start_time),

@@ -1,4 +1,4 @@
-import pydantic
+import pydantic.v1 as pydantic
 
 from ...platform.types import message as platform_message
 

@@ -1,7 +1,5 @@
 from __future__ import annotations
 
-import pydantic
-
 from ..core import app
 from . import stage
 from .resprule import resprule

@@ -50,17 +50,6 @@ class PlatformManager:
                 adapter=adapter
             )
 
-        async def on_stranger_message(event: platform_events.StrangerMessage, adapter: msadapter.MessageSourceAdapter):
-
-            await self.ap.query_pool.add_query(
-                launcher_type=core_entities.LauncherTypes.PERSON,
-                launcher_id=event.sender.id,
-                sender_id=event.sender.id,
-                message_event=event,
-                message_chain=event.message_chain,
-                adapter=adapter
-            )
-
         async def on_group_message(event: platform_events.GroupMessage, adapter: msadapter.MessageSourceAdapter):
 
             await self.ap.query_pool.add_query(
@@ -96,12 +85,6 @@ class PlatformManager:
             )
             self.adapters.append(adapter_inst)
 
-            if adapter_name == 'yiri-mirai':
-                adapter_inst.register_listener(
-                    platform_events.StrangerMessage,
-                    on_stranger_message
-                )
-
             adapter_inst.register_listener(
                 platform_events.FriendMessage,
                 on_friend_message

@@ -6,6 +6,7 @@ import time
 import datetime
 
 import aiocqhttp
+import aiohttp
 
 from .. import adapter
 from ...pipeline.longtext.strategies import forward
@@ -13,12 +14,12 @@ from ...core import app
 from ..types import message as platform_message
 from ..types import events as platform_events
 from ..types import entities as platform_entities
+from ...utils import image
 
 class AiocqhttpMessageConverter(adapter.MessageConverter):
 
     @staticmethod
-    def yiri2target(message_chain: platform_message.MessageChain) -> typing.Tuple[list, int, datetime.datetime]:
+    async def yiri2target(message_chain: platform_message.MessageChain) -> typing.Tuple[list, int, datetime.datetime]:
         msg_list = aiocqhttp.Message()
 
         msg_id = 0
@@ -59,7 +60,7 @@ class AiocqhttpMessageConverter(adapter.MessageConverter):
         elif type(msg) is forward.Forward:
 
             for node in msg.node_list:
-                msg_list.extend(AiocqhttpMessageConverter.yiri2target(node.message_chain)[0])
+                msg_list.extend((await AiocqhttpMessageConverter.yiri2target(node.message_chain))[0])
 
         else:
             msg_list.append(aiocqhttp.MessageSegment.text(str(msg)))
@@ -67,7 +68,7 @@ class AiocqhttpMessageConverter(adapter.MessageConverter):
         return msg_list, msg_id, msg_time
 
     @staticmethod
-    def target2yiri(message: str, message_id: int = -1):
+    async def target2yiri(message: str, message_id: int = -1):
         message = aiocqhttp.Message(message)
 
         yiri_msg_list = []
@@ -89,7 +90,8 @@ class AiocqhttpMessageConverter(adapter.MessageConverter):
             elif msg.type == "text":
                 yiri_msg_list.append(platform_message.Plain(text=msg.data["text"]))
             elif msg.type == "image":
-                yiri_msg_list.append(platform_message.Image(url=msg.data["url"]))
+                image_base64, image_format = await image.qq_image_url_to_base64(msg.data['url'])
+                yiri_msg_list.append(platform_message.Image(base64=f"data:image/{image_format};base64,{image_base64}"))
 
         chain = platform_message.MessageChain(yiri_msg_list)
 
@@ -99,9 +101,9 @@ class AiocqhttpMessageConverter(adapter.MessageConverter):
 class AiocqhttpEventConverter(adapter.EventConverter):
 
     @staticmethod
-    def yiri2target(event: platform_events.Event, bot_account_id: int):
+    async def yiri2target(event: platform_events.Event, bot_account_id: int):
 
-        msg, msg_id, msg_time = AiocqhttpMessageConverter.yiri2target(event.message_chain)
+        msg, msg_id, msg_time = await AiocqhttpMessageConverter.yiri2target(event.message_chain)
 
         if type(event) is platform_events.GroupMessage:
             role = "member"
@@ -164,8 +166,8 @@ class AiocqhttpEventConverter(adapter.EventConverter):
         return aiocqhttp.Event.from_payload(payload)
 
     @staticmethod
-    def target2yiri(event: aiocqhttp.Event):
-        yiri_chain = AiocqhttpMessageConverter.target2yiri(
+    async def target2yiri(event: aiocqhttp.Event):
+        yiri_chain = await AiocqhttpMessageConverter.target2yiri(
             event.message, event.message_id
         )
 
@@ -242,7 +244,7 @@ class AiocqhttpAdapter(adapter.MessageSourceAdapter):
     async def send_message(
         self, target_type: str, target_id: str, message: platform_message.MessageChain
     ):
-        aiocq_msg = AiocqhttpMessageConverter.yiri2target(message)[0]
+        aiocq_msg = (await AiocqhttpMessageConverter.yiri2target(message))[0]
 
         if target_type == "group":
             await self.bot.send_group_msg(group_id=int(target_id), message=aiocq_msg)
@@ -255,8 +257,8 @@ class AiocqhttpAdapter(adapter.MessageSourceAdapter):
         message: platform_message.MessageChain,
         quote_origin: bool = False,
     ):
-        aiocq_event = AiocqhttpEventConverter.yiri2target(message_source, self.bot_account_id)
-        aiocq_msg = AiocqhttpMessageConverter.yiri2target(message)[0]
+        aiocq_event = await AiocqhttpEventConverter.yiri2target(message_source, self.bot_account_id)
+        aiocq_msg = (await AiocqhttpMessageConverter.yiri2target(message))[0]
         if quote_origin:
             aiocq_msg = aiocqhttp.MessageSegment.reply(aiocq_event.message_id) + aiocq_msg
 
@@ -276,7 +278,7 @@ class AiocqhttpAdapter(adapter.MessageSourceAdapter):
         async def on_message(event: aiocqhttp.Event):
             self.bot_account_id = event.self_id
             try:
-                return await callback(self.event_converter.target2yiri(event), self)
+                return await callback(await self.event_converter.target2yiri(event), self)
             except:
                 traceback.print_exc()
 
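
All of the aiocqhttp converter methods above (yiri2target / target2yiri on both the message and event converters) become coroutines in this release, because converting an incoming image now performs an HTTP fetch (image.qq_image_url_to_base64) before the image is re-emitted as a base64 data URI. Any code that called them synchronously has to await them now. A minimal sketch of the new call shape — the import paths and the toy chain are assumptions for illustration, not verified LangBot APIs:

    import asyncio
    # assumed module locations, adjust to the actual package layout
    from pkg.platform.types import message as platform_message
    from pkg.platform.sources.aiocqhttp import AiocqhttpMessageConverter

    async def demo():
        chain = platform_message.MessageChain([platform_message.Plain(text="hello")])
        # previously a plain synchronous call; now it must be awaited
        msg_list, msg_id, msg_time = await AiocqhttpMessageConverter.yiri2target(chain)
        print(msg_list)

    asyncio.run(demo())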
@@ -9,8 +9,6 @@ import traceback
 import botpy
 import botpy.message as botpy_message
 import botpy.types.message as botpy_message_type
-import pydantic
-import pydantic.networks
 
 from .. import adapter as adapter_model
 from ...pipeline.longtext.strategies import forward

@@ -1,8 +1,8 @@
 
 from typing import Dict, List, Type
 
-import pydantic.main as pdm
-from pydantic import BaseModel
+import pydantic.v1.main as pdm
+from pydantic.v1 import BaseModel
 
 
 class PlatformMetaclass(pdm.ModelMetaclass):

@@ -7,7 +7,7 @@ from datetime import datetime
 from enum import Enum
 import typing
 
-import pydantic
+import pydantic.v1 as pydantic
 
 
 class Entity(pydantic.BaseModel):

@@ -6,7 +6,7 @@ from datetime import datetime
 from enum import Enum
 import typing
 
-import pydantic
+import pydantic.v1 as pydantic
 
 from . import entities as platform_entities
 from . import message as platform_message

@@ -5,8 +5,7 @@ from enum import Enum
 from pathlib import Path
 import typing
 
-import pydantic
-import pydantic.main
+import pydantic.v1 as pydantic
 
 from . import entities as platform_entities
 from .base import PlatformBaseModel, PlatformIndexedMetaclass, PlatformIndexedModel

@@ -2,7 +2,7 @@ from __future__ import annotations
 
 import typing
 import abc
-import pydantic
+import pydantic.v1 as pydantic
 import enum
 
 from . import events
@@ -127,16 +127,16 @@ class APIHost:
         Returns:
             bool: 是否满足要求, False时为无法获取版本号,True时为满足要求,报错为不满足要求
         """
-        qchatgpt_version = ""
+        langbot_version = ""
 
         try:
-            qchatgpt_version = self.ap.ver_mgr.get_current_version()  # 从updater模块获取版本号
+            langbot_version = self.ap.ver_mgr.get_current_version()  # 从updater模块获取版本号
         except:
             return False
 
-        if self.ap.ver_mgr.compare_version_str(qchatgpt_version, ge) < 0 or \
-            (self.ap.ver_mgr.compare_version_str(qchatgpt_version, le) > 0):
-            raise Exception("LangBot 版本不满足要求,某些功能(可能是由插件提供的)无法正常使用。(要求版本:{}-{},但当前版本:{})".format(ge, le, qchatgpt_version))
+        if self.ap.ver_mgr.compare_version_str(langbot_version, ge) < 0 or \
+            (self.ap.ver_mgr.compare_version_str(langbot_version, le) > 0):
+            raise Exception("LangBot 版本不满足要求,某些功能(可能是由插件提供的)无法正常使用。(要求版本:{}-{},但当前版本:{})".format(ge, le, langbot_version))
 
         return True
 

@@ -2,7 +2,7 @@ from __future__ import annotations
 
 import typing
 
-import pydantic
+import pydantic.v1 as pydantic
 
 from ..core import entities as core_entities
 from ..provider import entities as llm_entities

@@ -2,7 +2,7 @@ from __future__ import annotations
 
 import typing
 import enum
-import pydantic
+import pydantic.v1 as pydantic
 
 
 from ..platform.types import message as platform_message
@@ -38,6 +38,8 @@ class ContentElement(pydantic.BaseModel):
 
     image_url: typing.Optional[ImageURLContentObject] = None
 
+    image_base64: typing.Optional[str] = None
+
     def __str__(self):
         if self.type == 'text':
             return self.text
@@ -53,6 +55,10 @@ class ContentElement(pydantic.BaseModel):
     @classmethod
     def from_image_url(cls, image_url: str):
         return cls(type='image_url', image_url=ImageURLContentObject(url=image_url))
 
+    @classmethod
+    def from_image_base64(cls, image_base64: str):
+        return cls(type='image_base64', image_base64=image_base64)
+
 
 class Message(pydantic.BaseModel):
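
The new image_base64 content type added above is the link between the platform layer and the requesters changed later in this diff: the aiocqhttp adapter downloads the QQ image and wraps it as a data URI, the PreProcessor turns it into ContentElement.from_image_base64(...), and each requester then converts it to whatever its API expects (OpenAI-style image_url, Anthropic base64 source, and so on). A rough sketch of the element itself and the OpenAI-style rewrite performed in the chatcmpl requester (the data URI value is a placeholder):

    from pkg.provider import entities as llm_entities

    data_uri = "data:image/png;base64,iVBORw0KGgo..."  # placeholder, not a real image
    ce = llm_entities.ContentElement.from_image_base64(data_uri)

    # what the openai-chat-completions requester does with it before sending:
    me = ce.dict(exclude_none=True)          # {'type': 'image_base64', 'image_base64': '...'}
    if me["type"] == "image_base64":
        me["image_url"] = {"url": me["image_base64"]}
        me["type"] = "image_url"
        del me["image_base64"]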
@@ -2,9 +2,9 @@ from __future__ import annotations
 
 import typing
 
-import pydantic
+import pydantic.v1 as pydantic
 
-from . import api
+from . import requester
 from . import token
 
 
@@ -17,7 +17,7 @@ class LLMModelInfo(pydantic.BaseModel):
 
     token_mgr: token.TokenManager
 
-    requester: api.LLMAPIRequester
+    requester: requester.LLMAPIRequester
 
     tool_call_supported: typing.Optional[bool] = False
 

@@ -2,11 +2,11 @@ from __future__ import annotations
 
 import aiohttp
 
-from . import entities
+from . import entities, requester
 from ...core import app
 
-from . import token, api
-from .apis import chatcmpl, anthropicmsgs, moonshotchatcmpl, deepseekchatcmpl, ollamachat
+from . import token
+from .requesters import chatcmpl, anthropicmsgs, moonshotchatcmpl, deepseekchatcmpl, ollamachat, giteeaichatcmpl
 
 FETCH_MODEL_LIST_URL = "https://api.qchatgpt.rockchin.top/api/v2/fetch/model_list"
 
@@ -18,7 +18,7 @@ class ModelManager:
 
     model_list: list[entities.LLMModelInfo]
 
-    requesters: dict[str, api.LLMAPIRequester]
+    requesters: dict[str, requester.LLMAPIRequester]
 
     token_mgrs: dict[str, token.TokenManager]
 
@@ -42,7 +42,7 @@ class ModelManager:
         for k, v in self.ap.provider_cfg.data['keys'].items():
             self.token_mgrs[k] = token.TokenManager(k, v)
 
-        for api_cls in api.preregistered_requesters:
+        for api_cls in requester.preregistered_requesters:
             api_inst = api_cls(self.ap)
             await api_inst.initialize()
             self.requesters[api_inst.name] = api_inst
@@ -94,7 +94,7 @@ class ModelManager:
 
             model_name = model.get('model_name', default_model_info.model_name)
             token_mgr = self.token_mgrs[model['token_mgr']] if 'token_mgr' in model else default_model_info.token_mgr
-            requester = self.requesters[model['requester']] if 'requester' in model else default_model_info.requester
+            req = self.requesters[model['requester']] if 'requester' in model else default_model_info.requester
             tool_call_supported = model.get('tool_call_supported', default_model_info.tool_call_supported)
             vision_supported = model.get('vision_supported', default_model_info.vision_supported)
 
@@ -102,7 +102,7 @@ class ModelManager:
                 name=model['name'],
                 model_name=model_name,
                 token_mgr=token_mgr,
-                requester=requester,
+                requester=req,
                 tool_call_supported=tool_call_supported,
                 vision_supported=vision_supported
             )

@@ -48,6 +48,7 @@ class LLMAPIRequester(metaclass=abc.ABCMeta):
     @abc.abstractmethod
     async def call(
         self,
+        query: core_entities.Query,
         model: modelmgr_entities.LLMModelInfo,
         messages: typing.List[llm_entities.Message],
         funcs: typing.List[tools_entities.LLMFunction] = None,

 0  pkg/provider/modelmgr/requesters/__init__.py  Normal file

@@ -2,34 +2,45 @@ from __future__ import annotations
 
 import typing
 import traceback
+import base64
 
 import anthropic
+import httpx
 
-from .. import api, entities, errors
+from .. import entities, errors, requester
 
-from .. import api, entities, errors
+from .. import entities, errors
 from ....core import entities as core_entities
 from ... import entities as llm_entities
 from ...tools import entities as tools_entities
 from ....utils import image
 
 
-@api.requester_class("anthropic-messages")
-class AnthropicMessages(api.LLMAPIRequester):
+@requester.requester_class("anthropic-messages")
+class AnthropicMessages(requester.LLMAPIRequester):
     """Anthropic Messages API 请求器"""
 
     client: anthropic.AsyncAnthropic
 
     async def initialize(self):
 
+        httpx_client = anthropic._base_client.AsyncHttpxClientWrapper(
+            base_url=self.ap.provider_cfg.data['requester']['anthropic-messages']['base-url'],
+            # cast to a valid type because mypy doesn't understand our type narrowing
+            timeout=typing.cast(httpx.Timeout, self.ap.provider_cfg.data['requester']['anthropic-messages']['timeout']),
+            limits=anthropic._constants.DEFAULT_CONNECTION_LIMITS,
+            follow_redirects=True,
+            proxies=self.ap.proxy_mgr.get_forward_proxies()
+        )
+
         self.client = anthropic.AsyncAnthropic(
             api_key="",
-            base_url=self.ap.provider_cfg.data['requester']['anthropic-messages']['base-url'],
-            timeout=self.ap.provider_cfg.data['requester']['anthropic-messages']['timeout'],
-            proxies=self.ap.proxy_mgr.get_forward_proxies()
+            http_client=httpx_client,
         )
 
     async def call(
         self,
+        query: core_entities.Query,
         model: entities.LLMModelInfo,
         messages: typing.List[llm_entities.Message],
         funcs: typing.List[tools_entities.LLMFunction] = None,
@@ -61,24 +72,20 @@ class AnthropicMessages(api.LLMAPIRequester):
             if isinstance(m.content, str) and m.content.strip() != "":
                 req_messages.append(m.dict(exclude_none=True))
             elif isinstance(m.content, list):
-                # m.content = [
-                #     c for c in m.content if c.type == "text"
-                # ]
-
-                # if len(m.content) > 0:
-                #     req_messages.append(m.dict(exclude_none=True))
 
                 msg_dict = m.dict(exclude_none=True)
 
                 for i, ce in enumerate(m.content):
-                    if ce.type == "image_url":
-                        base64_image, image_format = await image.qq_image_url_to_base64(ce.image_url.url)
+                    if ce.type == "image_base64":
+                        image_b64, image_format = await image.extract_b64_and_format(ce.image_base64)
 
                         alter_image_ele = {
                             "type": "image",
                             "source": {
                                 "type": "base64",
                                 "media_type": f"image/{image_format}",
-                                "data": base64_image
+                                "data": image_b64
                             }
                         }
                         msg_dict["content"][i] = alter_image_ele
@@ -12,15 +12,15 @@ import httpx
 import aiohttp
 import async_lru
 
-from .. import api, entities, errors
+from .. import entities, errors, requester
 from ....core import entities as core_entities, app
 from ... import entities as llm_entities
 from ...tools import entities as tools_entities
 from ....utils import image
 
 
-@api.requester_class("openai-chat-completions")
-class OpenAIChatCompletions(api.LLMAPIRequester):
+@requester.requester_class("openai-chat-completions")
+class OpenAIChatCompletions(requester.LLMAPIRequester):
     """OpenAI ChatCompletion API 请求器"""
 
     client: openai.AsyncClient
@@ -65,6 +65,7 @@ class OpenAIChatCompletions(api.LLMAPIRequester):
 
     async def _closure(
         self,
+        query: core_entities.Query,
         req_messages: list[dict],
         use_model: entities.LLMModelInfo,
         use_funcs: list[tools_entities.LLMFunction] = None,
@@ -87,8 +88,12 @@ class OpenAIChatCompletions(api.LLMAPIRequester):
         for msg in messages:
             if 'content' in msg and isinstance(msg["content"], list):
                 for me in msg["content"]:
-                    if me["type"] == "image_url":
-                        me["image_url"]['url'] = await self.get_base64_str(me["image_url"]['url'])
+                    if me["type"] == "image_base64":
+                        me["image_url"] = {
+                            "url": me["image_base64"]
+                        }
+                        me["type"] = "image_url"
+                        del me["image_base64"]
 
         args["messages"] = messages
 
@@ -102,6 +107,7 @@ class OpenAIChatCompletions(api.LLMAPIRequester):
 
     async def call(
         self,
+        query: core_entities.Query,
         model: entities.LLMModelInfo,
         messages: typing.List[llm_entities.Message],
         funcs: typing.List[tools_entities.LLMFunction] = None,
@@ -118,7 +124,7 @@ class OpenAIChatCompletions(api.LLMAPIRequester):
             req_messages.append(msg_dict)
 
         try:
-            return await self._closure(req_messages, model, funcs)
+            return await self._closure(query=query, req_messages=req_messages, use_model=model, use_funcs=funcs)
         except asyncio.TimeoutError:
             raise errors.RequesterError('请求超时')
         except openai.BadRequestError as e:
@@ -134,11 +140,3 @@ class OpenAIChatCompletions(api.LLMAPIRequester):
             raise errors.RequesterError(f'请求过于频繁或余额不足: {e.message}')
         except openai.APIError as e:
             raise errors.RequesterError(f'请求错误: {e.message}')
-
-    @async_lru.alru_cache(maxsize=128)
-    async def get_base64_str(
-        self,
-        original_url: str,
-    ) -> str:
-        base64_image, image_format = await image.qq_image_url_to_base64(original_url)
-        return f"data:image/{image_format};base64,{base64_image}"

@@ -1,15 +1,13 @@
 from __future__ import annotations
 
-from ....core import app
-
 from . import chatcmpl
-from .. import api, entities, errors
+from .. import entities, errors, requester
 from ....core import entities as core_entities, app
 from ... import entities as llm_entities
 from ...tools import entities as tools_entities
 
 
-@api.requester_class("deepseek-chat-completions")
+@requester.requester_class("deepseek-chat-completions")
 class DeepseekChatCompletions(chatcmpl.OpenAIChatCompletions):
     """Deepseek ChatCompletion API 请求器"""
 
@@ -19,6 +17,7 @@ class DeepseekChatCompletions(chatcmpl.OpenAIChatCompletions):
 
     async def _closure(
         self,
+        query: core_entities.Query,
         req_messages: list[dict],
         use_model: entities.LLMModelInfo,
         use_funcs: list[tools_entities.LLMFunction] = None,
54
pkg/provider/modelmgr/requesters/giteeaichatcmpl.py
Normal file
54
pkg/provider/modelmgr/requesters/giteeaichatcmpl.py
Normal file
@@ -0,0 +1,54 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import json
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import aiohttp
|
||||||
|
import typing
|
||||||
|
|
||||||
|
from . import chatcmpl
|
||||||
|
from .. import entities, errors, requester
|
||||||
|
from ....core import app, entities as core_entities
|
||||||
|
from ... import entities as llm_entities
|
||||||
|
from ...tools import entities as tools_entities
|
||||||
|
from .. import entities as modelmgr_entities
|
||||||
|
|
||||||
|
|
||||||
|
@requester.requester_class("gitee-ai-chat-completions")
|
||||||
|
class GiteeAIChatCompletions(chatcmpl.OpenAIChatCompletions):
|
||||||
|
"""Gitee AI ChatCompletions API 请求器"""
|
||||||
|
|
||||||
|
def __init__(self, ap: app.Application):
|
||||||
|
self.ap = ap
|
||||||
|
self.requester_cfg = ap.provider_cfg.data['requester']['gitee-ai-chat-completions'].copy()
|
||||||
|
|
||||||
|
async def _closure(
|
||||||
|
self,
|
||||||
|
query: core_entities.Query,
|
||||||
|
req_messages: list[dict],
|
||||||
|
use_model: entities.LLMModelInfo,
|
||||||
|
use_funcs: list[tools_entities.LLMFunction] = None,
|
||||||
|
) -> llm_entities.Message:
|
||||||
|
self.client.api_key = use_model.token_mgr.get_token()
|
||||||
|
|
||||||
|
args = self.requester_cfg['args'].copy()
|
||||||
|
args["model"] = use_model.name if use_model.model_name is None else use_model.model_name
|
||||||
|
|
||||||
|
if use_funcs:
|
||||||
|
tools = await self.ap.tool_mgr.generate_tools_for_openai(use_funcs)
|
||||||
|
|
||||||
|
if tools:
|
||||||
|
args["tools"] = tools
|
||||||
|
|
||||||
|
# gitee 不支持多模态,把content都转换成纯文字
|
||||||
|
for m in req_messages:
|
||||||
|
if 'content' in m and isinstance(m["content"], list):
|
||||||
|
m["content"] = " ".join([c["text"] for c in m["content"]])
|
||||||
|
|
||||||
|
args["messages"] = req_messages
|
||||||
|
|
||||||
|
resp = await self._req(args)
|
||||||
|
|
||||||
|
message = await self._make_msg(resp)
|
||||||
|
|
||||||
|
return message
|
||||||
@@ -3,13 +3,13 @@ from __future__ import annotations
 from ....core import app
 
 from . import chatcmpl
-from .. import api, entities, errors
+from .. import entities, errors, requester
 from ....core import entities as core_entities, app
 from ... import entities as llm_entities
 from ...tools import entities as tools_entities
 
 
-@api.requester_class("moonshot-chat-completions")
+@requester.requester_class("moonshot-chat-completions")
 class MoonshotChatCompletions(chatcmpl.OpenAIChatCompletions):
     """Moonshot ChatCompletion API 请求器"""
 
@@ -19,6 +19,7 @@ class MoonshotChatCompletions(chatcmpl.OpenAIChatCompletions):
 
     async def _closure(
         self,
+        query: core_entities.Query,
         req_messages: list[dict],
         use_model: entities.LLMModelInfo,
         use_funcs: list[tools_entities.LLMFunction] = None,
@@ -4,21 +4,24 @@ import asyncio
 import os
 import typing
 from typing import Union, Mapping, Any, AsyncIterator
+import uuid
+import json
+import base64
 
 import async_lru
 import ollama
 
-from .. import api, entities, errors
+from .. import entities, errors, requester
 from ... import entities as llm_entities
 from ...tools import entities as tools_entities
-from ....core import app
+from ....core import app, entities as core_entities
 from ....utils import image
 
 REQUESTER_NAME: str = "ollama-chat"
 
 
-@api.requester_class(REQUESTER_NAME)
-class OllamaChatCompletions(api.LLMAPIRequester):
+@requester.requester_class(REQUESTER_NAME)
+class OllamaChatCompletions(requester.LLMAPIRequester):
     """Ollama平台 ChatCompletion API请求器"""
     client: ollama.AsyncClient
     request_cfg: dict
@@ -41,7 +44,7 @@ class OllamaChatCompletions(api.LLMAPIRequester):
             **args
         )
 
-    async def _closure(self, req_messages: list[dict], use_model: entities.LLMModelInfo,
+    async def _closure(self, query: core_entities.Query, req_messages: list[dict], use_model: entities.LLMModelInfo,
                        user_funcs: list[tools_entities.LLMFunction] = None) -> (
             llm_entities.Message):
         args: Any = self.request_cfg['args'].copy()
@@ -55,30 +58,59 @@ class OllamaChatCompletions(api.LLMAPIRequester):
                 for me in msg["content"]:
                     if me["type"] == "text":
                         text_content.append(me["text"])
-                    elif me["type"] == "image_url":
-                        image_url = await self.get_base64_str(me["image_url"]['url'])
-                        image_urls.append(image_url)
+                    elif me["type"] == "image_base64":
+                        image_urls.append(me["image_base64"])
                 msg["content"] = "\n".join(text_content)
                 msg["images"] = [url.split(',')[1] for url in image_urls]
+            if 'tool_calls' in msg:  # LangBot 内部以 str 存储 tool_calls 的参数,这里需要转换为 dict
+                for tool_call in msg['tool_calls']:
+                    tool_call['function']['arguments'] = json.loads(tool_call['function']['arguments'])
         args["messages"] = messages
 
-        resp: Mapping[str, Any] | AsyncIterator[Mapping[str, Any]] = await self._req(args)
+        args["tools"] = []
+        if user_funcs:
+            tools = await self.ap.tool_mgr.generate_tools_for_openai(user_funcs)
+            if tools:
+                args["tools"] = tools
+
+        resp = await self._req(args)
         message: llm_entities.Message = await self._make_msg(resp)
         return message
 
     async def _make_msg(
             self,
-            chat_completions: Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]) -> llm_entities.Message:
-        message: Any = chat_completions.pop('message', None)
+            chat_completions: ollama.ChatResponse) -> llm_entities.Message:
+        message: ollama.Message = chat_completions.message
         if message is None:
             raise ValueError("chat_completions must contain a 'message' field")
 
-        message.update(chat_completions)
-        ret_msg: llm_entities.Message = llm_entities.Message(**message)
+        ret_msg: llm_entities.Message = None
+
+        if message.content is not None:
+            ret_msg = llm_entities.Message(
+                role="assistant",
+                content=message.content
+            )
+        if message.tool_calls is not None and len(message.tool_calls) > 0:
+            tool_calls: list[llm_entities.ToolCall] = []
+
+            for tool_call in message.tool_calls:
+                tool_calls.append(llm_entities.ToolCall(
+                    id=uuid.uuid4().hex,
+                    type="function",
+                    function=llm_entities.FunctionCall(
+                        name=tool_call.function.name,
+                        arguments=json.dumps(tool_call.function.arguments)
+                    )
+                ))
+            ret_msg.tool_calls = tool_calls
+
         return ret_msg
 
     async def call(
             self,
+            query: core_entities.Query,
             model: entities.LLMModelInfo,
             messages: typing.List[llm_entities.Message],
             funcs: typing.List[tools_entities.LLMFunction] = None,
@@ -92,14 +124,6 @@ class OllamaChatCompletions(api.LLMAPIRequester):
                 msg_dict["content"] = "\n".join(part["text"] for part in content)
             req_messages.append(msg_dict)
         try:
-            return await self._closure(req_messages, model)
+            return await self._closure(query, req_messages, model, funcs)
         except asyncio.TimeoutError:
             raise errors.RequesterError('请求超时')
 
-    @async_lru.alru_cache(maxsize=128)
-    async def get_base64_str(
-            self,
-            original_url: str,
-    ) -> str:
-        base64_image, image_format = await image.qq_image_url_to_base64(original_url)
-        return f"data:image/{image_format};base64,{base64_image}"
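One detail worth calling out in the new `_closure`/`_make_msg` pair: LangBot stores tool-call arguments as JSON strings, while the ollama client works with plain dicts, so arguments are decoded on the way out and re-serialized on the way back. A minimal round-trip sketch; the function name and arguments below are invented, not from the patch:

import json
import uuid

# Outbound: arguments stored as a JSON string are decoded before the ollama call,
# exactly as the loop over msg['tool_calls'] above does.
stored_call = {"function": {"name": "get_weather", "arguments": '{"city": "Beijing"}'}}
stored_call["function"]["arguments"] = json.loads(stored_call["function"]["arguments"])
assert stored_call["function"]["arguments"] == {"city": "Beijing"}

# Inbound: _make_msg re-serializes the dict and mints a fresh id, since ollama
# tool calls carry no id of their own.
arguments_str = json.dumps(stored_call["function"]["arguments"])
call_id = uuid.uuid4().hex
print(call_id, arguments_str)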
@@ -2,8 +2,6 @@ from __future__ import annotations
 
 import typing
 
-import pydantic
-
 
 class TokenManager():
     """鉴权 Token 管理器
@@ -4,7 +4,7 @@ from . import runner
 from ..core import app
 
 from .runners import localagent
+from .runners import difysvapi
 
 
 class RunnerManager:
 
pkg/provider/runners/difysvapi.py (new file, 278 lines)
@@ -0,0 +1,278 @@
+from __future__ import annotations
+
+import typing
+import json
+import uuid
+import base64
+
+from .. import runner
+from ...core import entities as core_entities
+from .. import entities as llm_entities
+from ...utils import image
+
+from libs.dify_service_api.v1 import client, errors
+
+
+@runner.runner_class("dify-service-api")
+class DifyServiceAPIRunner(runner.RequestRunner):
+    """Dify Service API 对话请求器"""
+
+    dify_client: client.AsyncDifyServiceClient
+
+    async def initialize(self):
+        """初始化"""
+        valid_app_types = ["chat", "agent", "workflow"]
+        if (
+            self.ap.provider_cfg.data["dify-service-api"]["app-type"]
+            not in valid_app_types
+        ):
+            raise errors.DifyAPIError(
+                f"不支持的 Dify 应用类型: {self.ap.provider_cfg.data['dify-service-api']['app-type']}"
+            )
+
+        api_key = self.ap.provider_cfg.data["dify-service-api"][
+            self.ap.provider_cfg.data["dify-service-api"]["app-type"]
+        ]["api-key"]
+
+        self.dify_client = client.AsyncDifyServiceClient(
+            api_key=api_key,
+            base_url=self.ap.provider_cfg.data["dify-service-api"]["base-url"],
+        )
+
+    async def _preprocess_user_message(
+        self, query: core_entities.Query
+    ) -> tuple[str, list[str]]:
+        """预处理用户消息,提取纯文本,并将图片上传到 Dify 服务
+
+        Returns:
+            tuple[str, list[str]]: 纯文本和图片的 Dify 服务图片 ID
+        """
+        plain_text = ""
+        image_ids = []
+        if isinstance(query.user_message.content, list):
+            for ce in query.user_message.content:
+                if ce.type == "text":
+                    plain_text += ce.text
+                elif ce.type == "image_base64":
+                    image_b64, image_format = await image.extract_b64_and_format(ce.image_base64)
+                    file_bytes = base64.b64decode(image_b64)
+                    file = ("img.png", file_bytes, f"image/{image_format}")
+                    file_upload_resp = await self.dify_client.upload_file(
+                        file,
+                        f"{query.session.launcher_type.value}_{query.session.launcher_id}",
+                    )
+                    image_id = file_upload_resp["id"]
+                    image_ids.append(image_id)
+        elif isinstance(query.user_message.content, str):
+            plain_text = query.user_message.content
+
+        return plain_text, image_ids
+
+    async def _chat_messages(
+        self, query: core_entities.Query
+    ) -> typing.AsyncGenerator[llm_entities.Message, None]:
+        """调用聊天助手"""
+        cov_id = query.session.using_conversation.uuid or ""
+
+        plain_text, image_ids = await self._preprocess_user_message(query)
+
+        files = [
+            {
+                "type": "image",
+                "transfer_method": "local_file",
+                "upload_file_id": image_id,
+            }
+            for image_id in image_ids
+        ]
+
+        mode = "basic"  # 标记是基础编排还是工作流编排
+
+        basic_mode_pending_chunk = ''
+
+        async for chunk in self.dify_client.chat_messages(
+            inputs={},
+            query=plain_text,
+            user=f"{query.session.launcher_type.value}_{query.session.launcher_id}",
+            conversation_id=cov_id,
+            files=files,
+            timeout=self.ap.provider_cfg.data["dify-service-api"]["chat"]["timeout"],
+        ):
+            self.ap.logger.debug("dify-chat-chunk: ", chunk)
+
+            if chunk['event'] == 'workflow_started':
+                mode = "workflow"
+
+            if mode == "workflow":
+                if chunk['event'] == 'node_finished':
+                    if chunk['data']['node_type'] == 'answer':
+                        yield llm_entities.Message(
+                            role="assistant",
+                            content=chunk['data']['outputs']['answer'],
+                        )
+            elif mode == "basic":
+                if chunk['event'] == 'message':
+                    basic_mode_pending_chunk += chunk['answer']
+                elif chunk['event'] == 'message_end':
+                    yield llm_entities.Message(
+                        role="assistant",
+                        content=basic_mode_pending_chunk,
+                    )
+                    basic_mode_pending_chunk = ''
+
+        query.session.using_conversation.uuid = chunk["conversation_id"]
+
+    async def _agent_chat_messages(
+        self, query: core_entities.Query
+    ) -> typing.AsyncGenerator[llm_entities.Message, None]:
+        """调用聊天助手"""
+        cov_id = query.session.using_conversation.uuid or ""
+
+        plain_text, image_ids = await self._preprocess_user_message(query)
+
+        files = [
+            {
+                "type": "image",
+                "transfer_method": "local_file",
+                "upload_file_id": image_id,
+            }
+            for image_id in image_ids
+        ]
+
+        ignored_events = ["agent_message"]
+
+        async for chunk in self.dify_client.chat_messages(
+            inputs={},
+            query=plain_text,
+            user=f"{query.session.launcher_type.value}_{query.session.launcher_id}",
+            response_mode="streaming",
+            conversation_id=cov_id,
+            files=files,
+            timeout=self.ap.provider_cfg.data["dify-service-api"]["chat"]["timeout"],
+        ):
+            self.ap.logger.debug("dify-agent-chunk: ", chunk)
+            if chunk["event"] in ignored_events:
+                continue
+            if chunk["event"] == "agent_thought":
+
+                if chunk['tool'] != '' and chunk['observation'] != '':  # 工具调用结果,跳过
+                    continue
+
+                if chunk['thought'].strip() != '':  # 文字回复内容
+                    msg = llm_entities.Message(
+                        role="assistant",
+                        content=chunk["thought"],
+                    )
+                    yield msg
+
+                if chunk['tool']:
+                    msg = llm_entities.Message(
+                        role="assistant",
+                        tool_calls=[
+                            llm_entities.ToolCall(
+                                id=chunk['id'],
+                                type="function",
+                                function=llm_entities.FunctionCall(
+                                    name=chunk["tool"],
+                                    arguments=json.dumps({}),
+                                ),
+                            )
+                        ],
+                    )
+                    yield msg
+
+        query.session.using_conversation.uuid = chunk["conversation_id"]
+
+    async def _workflow_messages(
+        self, query: core_entities.Query
+    ) -> typing.AsyncGenerator[llm_entities.Message, None]:
+        """调用工作流"""
+
+        if not query.session.using_conversation.uuid:
+            query.session.using_conversation.uuid = str(uuid.uuid4())
+
+        cov_id = query.session.using_conversation.uuid
+
+        plain_text, image_ids = await self._preprocess_user_message(query)
+
+        files = [
+            {
+                "type": "image",
+                "transfer_method": "local_file",
+                "upload_file_id": image_id,
+            }
+            for image_id in image_ids
+        ]
+
+        ignored_events = ["text_chunk", "workflow_started"]
+
+        async for chunk in self.dify_client.workflow_run(
+            inputs={
+                "langbot_user_message_text": plain_text,
+                "langbot_session_id": f"{query.session.launcher_type.value}_{query.session.launcher_id}",
+                "langbot_conversation_id": cov_id,
+            },
+            user=f"{query.session.launcher_type.value}_{query.session.launcher_id}",
+            files=files,
+            timeout=self.ap.provider_cfg.data["dify-service-api"]["workflow"]["timeout"],
+        ):
+            self.ap.logger.debug("dify-workflow-chunk: ", chunk)
+            if chunk["event"] in ignored_events:
+                continue
+
+            if chunk["event"] == "node_started":
+
+                if (
+                    chunk["data"]["node_type"] == "start"
+                    or chunk["data"]["node_type"] == "end"
+                ):
+                    continue
+
+                msg = llm_entities.Message(
+                    role="assistant",
+                    content=None,
+                    tool_calls=[
+                        llm_entities.ToolCall(
+                            id=chunk["data"]["node_id"],
+                            type="function",
+                            function=llm_entities.FunctionCall(
+                                name=chunk["data"]["title"],
+                                arguments=json.dumps({}),
+                            ),
+                        )
+                    ],
+                )
+
+                yield msg
+
+            elif chunk["event"] == "workflow_finished":
+                if chunk['data']['error']:
+                    raise errors.DifyAPIError(chunk['data']['error'])
+
+                msg = llm_entities.Message(
+                    role="assistant",
+                    content=chunk["data"]["outputs"][
+                        self.ap.provider_cfg.data["dify-service-api"]["workflow"][
+                            "output-key"
+                        ]
+                    ],
+                )
+
+                yield msg
+
+    async def run(
+        self, query: core_entities.Query
+    ) -> typing.AsyncGenerator[llm_entities.Message, None]:
+        """运行请求"""
+        if self.ap.provider_cfg.data["dify-service-api"]["app-type"] == "chat":
+            async for msg in self._chat_messages(query):
+                yield msg
+        elif self.ap.provider_cfg.data["dify-service-api"]["app-type"] == "agent":
+            async for msg in self._agent_chat_messages(query):
+                yield msg
+        elif self.ap.provider_cfg.data["dify-service-api"]["app-type"] == "workflow":
+            async for msg in self._workflow_messages(query):
+                yield msg
+        else:
+            raise errors.DifyAPIError(
+                f"不支持的 Dify 应用类型: {self.ap.provider_cfg.data['dify-service-api']['app-type']}"
+            )
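For orientation, the new runner exposes a single async-generator entry point and dispatches on the configured app type; callers simply iterate the resulting stream of Message objects. A hedged usage sketch — the collect_replies helper and its shape are illustrative, not LangBot API:

# Illustrative only: draining DifyServiceAPIRunner.run(), which dispatches to
# _chat_messages / _agent_chat_messages / _workflow_messages based on the
# provider.json "dify-service-api.app-type" value.
async def collect_replies(runner, query):
    replies = []
    async for msg in runner.run(query):
        replies.append(msg)  # tool-call progress messages and the final answer
    return replies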
@@ -23,7 +23,7 @@ class LocalAgentRunner(runner.RequestRunner):
         req_messages = query.prompt.messages.copy() + query.messages.copy() + [query.user_message]
 
         # 首次请求
-        msg = await query.use_model.requester.call(query.use_model, req_messages, query.use_funcs)
+        msg = await query.use_model.requester.call(query, query.use_model, req_messages, query.use_funcs)
 
         yield msg
 
@@ -61,7 +61,7 @@ class LocalAgentRunner(runner.RequestRunner):
             req_messages.append(err_msg)
 
         # 处理完所有调用,再次请求
-        msg = await query.use_model.requester.call(query.use_model, req_messages, query.use_funcs)
+        msg = await query.use_model.requester.call(query, query.use_model, req_messages, query.use_funcs)
 
         yield msg
 
@@ -1,7 +1,7 @@
|
|||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
import typing
|
import typing
|
||||||
import pydantic
|
import pydantic.v1 as pydantic
|
||||||
|
|
||||||
from ...provider import entities
|
from ...provider import entities
|
||||||
|
|
||||||
|
|||||||
@@ -4,7 +4,7 @@ import abc
 import typing
 import asyncio
 
-import pydantic
+import pydantic.v1 as pydantic
 
 from ...core import entities as core_entities
 
@@ -118,10 +118,9 @@ class ToolManager:
             traceback.print_exc()
             return f"error occurred when executing function {name}: {e}"
         finally:
-
             plugin = None
 
-            for p in self.ap.plugin_mgr.plugins:
+            for p in self.ap.plugin_mgr.plugins():
                 if function in p.content_functions:
                     plugin = p
                     break
@@ -137,4 +136,4 @@ class ToolManager:
             },
             function_name=function.name,
             function_description=function.description,
         )
@@ -6,7 +6,7 @@ import os
 import base64
 import logging
 
-import pydantic
+import pydantic.v1 as pydantic
 import requests
 
 from ..core import app
@@ -62,11 +62,11 @@ class AnnouncementManager:
     async def fetch_saved(
         self
     ) -> list[Announcement]:
-        if not os.path.exists("res/announcement_saved.json"):
-            with open("res/announcement_saved.json", "w", encoding="utf-8") as f:
+        if not os.path.exists("data/labels/announcement_saved.json"):
+            with open("data/labels/announcement_saved.json", "w", encoding="utf-8") as f:
                 f.write("[]")
 
-        with open("res/announcement_saved.json", "r", encoding="utf-8") as f:
+        with open("data/labels/announcement_saved.json", "r", encoding="utf-8") as f:
            content = f.read()
 
        if not content:
@@ -79,7 +79,7 @@ class AnnouncementManager:
         content: list[Announcement]
     ):
 
-        with open("res/announcement_saved.json", "w", encoding="utf-8") as f:
+        with open("data/labels/announcement_saved.json", "w", encoding="utf-8") as f:
             f.write(json.dumps([
                 item.to_dict() for item in content
             ], indent=4, ensure_ascii=False))
@@ -1,4 +1,4 @@
-semantic_version = "v3.4.0"
+semantic_version = "v3.4.1.5"
 
 debug_mode = False
 
@@ -1,15 +1,46 @@
 import base64
 import typing
+import io
 from urllib.parse import urlparse, parse_qs
 import ssl
 
 import aiohttp
+import PIL.Image
 
 
+def get_qq_image_downloadable_url(image_url: str) -> tuple[str, dict]:
+    """获取QQ图片的下载链接"""
+    parsed = urlparse(image_url)
+    query = parse_qs(parsed.query)
+    return f"http://{parsed.netloc}{parsed.path}", query
+
+
+async def get_qq_image_bytes(image_url: str, query: dict={}) -> tuple[bytes, str]:
+    """[弃用]获取QQ图片的bytes"""
+    image_url, query_in_url = get_qq_image_downloadable_url(image_url)
+    query = {**query, **query_in_url}
+    ssl_context = ssl.create_default_context()
+    ssl_context.check_hostname = False
+    ssl_context.verify_mode = ssl.CERT_NONE
+    async with aiohttp.ClientSession(trust_env=False) as session:
+        async with session.get(image_url, params=query, ssl=ssl_context) as resp:
+            resp.raise_for_status()
+            file_bytes = await resp.read()
+            content_type = resp.headers.get('Content-Type')
+            if not content_type:
+                image_format = 'jpeg'
+            elif not content_type.startswith('image/'):
+                pil_img = PIL.Image.open(io.BytesIO(file_bytes))
+                image_format = pil_img.format.lower()
+            else:
+                image_format = content_type.split('/')[-1]
+            return file_bytes, image_format
+
+
 async def qq_image_url_to_base64(
     image_url: str
 ) -> typing.Tuple[str, str]:
-    """将QQ图片URL转为base64,并返回图片格式
+    """[弃用]将QQ图片URL转为base64,并返回图片格式
 
     Args:
         image_url (str): QQ图片URL
@@ -17,30 +48,23 @@ async def qq_image_url_to_base64(
     Returns:
         typing.Tuple[str, str]: base64编码和图片格式
     """
-    parsed = urlparse(image_url)
-    query = parse_qs(parsed.query)
+    image_url, query = get_qq_image_downloadable_url(image_url)
 
     # Flatten the query dictionary
     query = {k: v[0] for k, v in query.items()}
 
-    ssl_context = ssl.create_default_context()
-    ssl_context.check_hostname = False
-    ssl_context.verify_mode = ssl.CERT_NONE
-
-    async with aiohttp.ClientSession(trust_env=False) as session:
-        async with session.get(
-            f"http://{parsed.netloc}{parsed.path}",
-            params=query,
-            ssl=ssl_context
-        ) as resp:
-            resp.raise_for_status()  # 检查HTTP错误
-            file_bytes = await resp.read()
-            content_type = resp.headers.get('Content-Type')
-            if not content_type or not content_type.startswith('image/'):
-                image_format = 'jpeg'
-            else:
-                image_format = content_type.split('/')[-1]
+    file_bytes, image_format = await get_qq_image_bytes(image_url, query)
 
     base64_str = base64.b64encode(file_bytes).decode()
 
     return base64_str, image_format
+
+async def extract_b64_and_format(image_base64_data: str) -> typing.Tuple[str, str]:
+    """提取base64编码和图片格式
+
+    data:image/jpeg;base64,xxx
+    提取出base64编码和图片格式
+    """
+    base64_str = image_base64_data.split(',')[-1]
+    image_format = image_base64_data.split(':')[-1].split(';')[0].split('/')[-1]
+    return base64_str, image_format
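The new extract_b64_and_format helper is plain string slicing on a data URI. A quick stand-alone sketch with a made-up URI:

# Mirrors extract_b64_and_format above, applied to a hypothetical data URI.
data_uri = "data:image/jpeg;base64,AAAA"
base64_str = data_uri.split(',')[-1]                                  # "AAAA"
image_format = data_uri.split(':')[-1].split(';')[0].split('/')[-1]   # "jpeg"
print(base64_str, image_format)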
@@ -2,7 +2,7 @@ import aiohttp
 
 async def get_myip() -> str:
     try:
-        async with aiohttp.ClientSession() as session:
+        async with aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=10)) as session:
             async with session.get("https://ip.useragentinfo.com/myip") as response:
                 return await response.text()
     except Exception as e:
@@ -1,7 +1,5 @@
 from __future__ import annotations
 
-import pydantic
-
 
 LOG_PAGE_SIZE = 20
 MAX_CACHED_PAGES = 10
@@ -22,13 +22,3 @@ def install_requirements(file):
     pipmain(['install', '-r', file, "-i", "https://pypi.tuna.tsinghua.edu.cn/simple",
              "--trusted-host", "pypi.tuna.tsinghua.edu.cn"])
     # log.reset_logging()
-
-
-if __name__ == "__main__":
-    try:
-        install("openai11")
-    except Exception as e:
-        print(111)
-        print(e)
-
-    print(222)
@@ -1,3 +1,4 @@
+# direct
 requests
 openai>1.0.0
 anthropic
@@ -9,7 +10,7 @@ Pillow
 tiktoken
 PyYaml
 aiohttp
-pydantic<2.0
+pydantic>2.0
 websockets
 urllib3
 psutil
@@ -22,4 +23,7 @@ quart-cors
 aiofiles
 aioshutil
 argon2-cffi
 pyjwt
+
+# indirect
+taskgroup==0.0.0a4
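The requirement bump from pydantic<2.0 to pydantic>2.0 pairs with the import rewrites earlier in this compare (import pydantic → import pydantic.v1 as pydantic): pydantic 2 ships a v1 compatibility module, so v1-style models keep working under the aliased import. A minimal sketch under that assumption; the Example model and its fields are hypothetical, not taken from the repository:

# pydantic 2 exposes the old API under pydantic.v1, which is what the aliased
# imports in this changeset rely on.
import pydantic.v1 as pydantic

class Example(pydantic.BaseModel):
    id: int = 0
    content: str = ""

print(Example(id=1, content="hello").dict())  # v1-style .dict() still available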
BIN res/social.png — binary file not shown (before: 70 KiB, after: 157 KiB)
@@ -8,69 +8,59 @@
         "vision_supported": false
     },
     {
-        "name": "gpt-3.5-turbo-0125",
+        "name": "gpt-4o",
         "tool_call_supported": true,
-        "vision_supported": false
+        "vision_supported": true
     },
     {
-        "name": "gpt-3.5-turbo",
+        "name": "gpt-4o-2024-11-20",
         "tool_call_supported": true,
-        "vision_supported": false
+        "vision_supported": true
     },
     {
-        "name": "gpt-3.5-turbo-1106",
+        "name": "gpt-4o-2024-08-06",
         "tool_call_supported": true,
-        "vision_supported": false
+        "vision_supported": true
+    },
+    {
+        "name": "gpt-4o-2024-05-13",
+        "tool_call_supported": true,
+        "vision_supported": true
+    },
+    {
+        "name": "chatgpt-4o-latest",
+        "tool_call_supported": true,
+        "vision_supported": true
+    },
+    {
+        "name": "gpt-4o-mini",
+        "tool_call_supported": true,
+        "vision_supported": true
+    },
+    {
+        "name": "o1-preview",
+        "tool_call_supported": true,
+        "vision_supported": true
+    },
+    {
+        "name": "o1-mini",
+        "tool_call_supported": true,
+        "vision_supported": true
     },
     {
         "name": "gpt-4-turbo",
         "tool_call_supported": true,
         "vision_supported": true
     },
-    {
-        "name": "gpt-4-turbo-2024-04-09",
-        "tool_call_supported": true,
-        "vision_supported": true
-    },
-    {
-        "name": "gpt-4-turbo-preview",
-        "tool_call_supported": true,
-        "vision_supported": true
-    },
-    {
-        "name": "gpt-4-0125-preview",
-        "tool_call_supported": true,
-        "vision_supported": true
-    },
-    {
-        "name": "gpt-4-1106-preview",
-        "tool_call_supported": true,
-        "vision_supported": true
-    },
     {
         "name": "gpt-4",
         "tool_call_supported": true,
         "vision_supported": true
     },
     {
-        "name": "gpt-4o",
+        "name": "gpt-3.5-turbo",
         "tool_call_supported": true,
-        "vision_supported": true
+        "vision_supported": false
     },
-    {
-        "name": "gpt-4-0613",
-        "tool_call_supported": true,
-        "vision_supported": true
-    },
-    {
-        "name": "gpt-4-32k",
-        "tool_call_supported": true,
-        "vision_supported": true
-    },
-    {
-        "name": "gpt-4-32k-0613",
-        "tool_call_supported": true,
-        "vision_supported": true
     },
     {
         "model_name": "SparkDesk",
@@ -81,19 +71,19 @@
         "name": "OneAPI/gemini-pro"
     },
     {
-        "name": "claude-3-opus-20240229",
+        "name": "claude-3-opus-latest",
         "requester": "anthropic-messages",
         "token_mgr": "anthropic",
         "vision_supported": true
     },
     {
-        "name": "claude-3-sonnet-20240229",
+        "name": "claude-3-5-sonnet-latest",
         "requester": "anthropic-messages",
         "token_mgr": "anthropic",
         "vision_supported": true
     },
     {
-        "name": "claude-3-haiku-20240307",
+        "name": "claude-3-5-haiku-latest",
         "requester": "anthropic-messages",
         "token_mgr": "anthropic",
         "vision_supported": true
@@ -120,6 +110,11 @@
         "name": "deepseek-chat",
         "requester": "deepseek-chat-completions",
         "token_mgr": "deepseek"
+    },
+    {
+        "name": "deepseek-coder",
+        "requester": "deepseek-chat-completions",
+        "token_mgr": "deepseek"
     }
 ]
 }
@@ -10,7 +10,7 @@
     },
     {
         "adapter": "aiocqhttp",
-        "enable": false,
+        "enable": true,
         "host": "0.0.0.0",
         "port": 2280,
         "access-token": ""
@@ -13,6 +13,9 @@
         ],
         "deepseek": [
             "sk-1234567890"
+        ],
+        "gitee-ai": [
+            "XXXXX"
         ]
     },
     "requester": {
@@ -42,12 +45,34 @@
             "base-url": "http://127.0.0.1:11434",
             "args": {},
             "timeout": 600
+        },
+        "gitee-ai-chat-completions": {
+            "base-url": "https://ai.gitee.com/v1",
+            "args": {},
+            "timeout": 120
         }
     },
-    "model": "gpt-3.5-turbo",
+    "model": "gpt-4o",
     "prompt-mode": "normal",
     "prompt": {
         "default": ""
     },
-    "runner": "local-agent"
+    "runner": "local-agent",
+    "dify-service-api": {
+        "base-url": "https://api.dify.ai/v1",
+        "app-type": "chat",
+        "chat": {
+            "api-key": "app-1234567890",
+            "timeout": 120
+        },
+        "agent": {
+            "api-key": "app-1234567890",
+            "timeout": 120
+        },
+        "workflow": {
+            "api-key": "app-1234567890",
+            "output-key": "summary",
+            "timeout": 120
+        }
+    }
 }
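The workflow "output-key" added to this config is what _workflow_messages uses to pick the final answer out of a Dify end node's outputs. A small hedged sketch; the outputs payload below is a made-up example, not real Dify data:

# How the configured output-key selects the reply from a workflow_finished chunk.
outputs = {"summary": "Today's digest ...", "other": "..."}
output_key = "summary"  # provider.json -> dify-service-api.workflow.output-key
print(outputs[output_key])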
@@ -54,6 +54,15 @@
                 "type": "string"
             },
             "default": []
+        },
+        "gitee": {
+            "type": "array",
+            "title": "Gitee API 密钥",
+            "description": "Gitee API 密钥",
+            "items": {
+                "type": "string"
+            },
+            "default": []
         }
     }
 },
@@ -160,6 +169,25 @@
                     "default": 600
                 }
             }
+        },
+        "gitee-ai-chat-completions": {
+            "type": "object",
+            "title": "Gitee AI API 请求配置",
+            "description": "仅可编辑 URL 和 超时时间,额外请求参数不支持可视化编辑,请到编辑器编辑",
+            "properties": {
+                "base-url": {
+                    "type": "string",
+                    "title": "API URL"
+                },
+                "args": {
+                    "type": "object"
+                },
+                "timeout": {
+                    "type": "number",
+                    "title": "API 请求超时时间",
+                    "default": 120
+                }
+            }
         }
     }
 },
@@ -202,6 +230,73 @@
             "title": "请求运行器",
             "description": "设置请求运行器。值为local-agent时,使用内置默认运行器;支持插件扩展",
             "default": "local-agent"
+        },
+        "dify-service-api": {
+            "type": "object",
+            "title": "Dify Service API 配置",
+            "properties": {
+                "base-url": {
+                    "type": "string",
+                    "title": "API URL",
+                    "description": "Dify Service API 的 基础URL,可以在 Dify 应用 API 页面查看",
+                    "default": "https://api.dify.ai/v1"
+                },
+                "app-type": {
+                    "type": "string",
+                    "title": "应用类型",
+                    "description": "支持 chat 和 workflow,chat:聊天助手(含高级编排)和 Agent;workflow:工作流;请填写下方对应的应用类型 API 参数",
+                    "enum": ["chat", "workflow"],
+                    "default": "chat"
+                },
+                "chat": {
+                    "type": "object",
+                    "title": "聊天助手 API 参数",
+                    "properties": {
+                        "api-key": {
+                            "type": "string",
+                            "title": "API 密钥"
+                        },
+                        "timeout": {
+                            "type": "number",
+                            "title": "API 请求超时时间"
+                        }
+                    }
+                },
+                "agent": {
+                    "type": "object",
+                    "title": "Agent API 参数",
+                    "properties": {
+                        "api-key": {
+                            "type": "string",
+                            "title": "API 密钥"
+                        },
+                        "timeout": {
+                            "type": "number",
+                            "title": "API 请求超时时间"
+                        }
+                    }
+                },
+                "workflow": {
+                    "type": "object",
+                    "title": "工作流 API 参数",
+                    "properties": {
+                        "api-key": {
+                            "type": "string",
+                            "title": "API 密钥"
+                        },
+                        "output-key": {
+                            "type": "string",
+                            "title": "工作流输出键",
+                            "description": "设置工作流输出键,用于从 Dify Workflow 结束节点返回的 JSON 数据中提取输出内容",
+                            "default": "summary"
+                        },
+                        "timeout": {
+                            "type": "number",
+                            "title": "API 请求超时时间"
+                        }
+                    }
+                }
+            }
         }
     }
 }
 }
@@ -47,17 +47,21 @@ const login = () => {
         user: user.value,
         password: password.value
     }).then(res => {
-        if (res.data.data.token) {
+        if (res.data.code == 0) {
             emit('success', '登录成功')
             localStorage.setItem('user-token', res.data.data.token)
             setTimeout(() => {
                 location.reload()
             }, 1000)
         } else {
-            emit('error', '登录失败')
+            emit('error', res.data.msg)
         }
     }).catch(err => {
-        emit('error', err.response.data.message)
+        if (err.response.data.msg) {
+            emit('error', err.response.data.msg)
+        } else {
+            emit('error', '登录失败')
+        }
     })
 }
 