Mirror of https://github.com/harry0703/MoneyPrinterTurbo.git (synced 2025-11-25 03:15:04 +08:00)

feat: add provider ai pollinations (#667)

* feat: add provider ai pollinations
* update: enter line

---------

Co-authored-by: Diep Do <diepchiaser@gmail.com>
README-en.md
@@ -66,7 +66,7 @@ Picwish focuses on the **image processing field**, providing a rich set of **ima
 - [x] Supports **background music**, either random or specified music files, with adjustable `background music volume`
 - [x] Video material sources are **high-definition** and **royalty-free**, and you can also use your own **local materials**
 - [x] Supports integration with various models such as **OpenAI**, **Moonshot**, **Azure**, **gpt4free**, **one-api**,
-  **Qwen**, **Google Gemini**, **Ollama**, **DeepSeek**, **ERNIE** and more
+  **Qwen**, **Google Gemini**, **Ollama**, **DeepSeek**, **ERNIE**, **pollinations** and more
 - For users in China, it is recommended to use **DeepSeek** or **Moonshot** as the large model provider (directly accessible in China, no VPN needed. Free credits upon registration, generally sufficient for use)

 ❓[How to Use the Free OpenAI GPT-3.5 Model?](https://github.com/harry0703/MoneyPrinterTurbo/blob/main/README-en.md#common-questions-)
README.md
@@ -59,7 +59,7 @@
 - [x] Supports **background music**, either random or a specified music file, with adjustable `background music volume`
 - [x] Video materials are **high-definition** and **royalty-free**; you can also use your own **local materials**
 - [x] Supports integration with models such as **OpenAI**, **Moonshot**, **Azure**, **gpt4free**, **one-api**, **Qwen**, **Google Gemini**, **Ollama**,
-  **DeepSeek**, **ERNIE**, and other models
+  **DeepSeek**, **ERNIE**, **pollinations**, and other models
 - For users in China, it is recommended to use **DeepSeek** or **Moonshot** as the large model provider (directly accessible in China, no VPN needed; free credits upon registration, generally sufficient)

 ### Future Plans 📅
app/services/llm.py
@@ -1,6 +1,7 @@
 import json
 import logging
 import re
+import requests
 from typing import List

 import g4f
@@ -82,23 +83,61 @@ def _generate_response(prompt: str) -> str:
             raise ValueError(
                 f"{llm_provider}: secret_key is not set, please set it in the config.toml file."
             )
+    elif llm_provider == "pollinations":
+        try:
+            base_url = config.app.get("pollinations_base_url", "")
+            if not base_url:
+                base_url = "https://text.pollinations.ai/openai"
+            model_name = config.app.get("pollinations_model_name", "openai-fast")
+
+            # Prepare the payload
+            payload = {
+                "model": model_name,
+                "messages": [
+                    {"role": "user", "content": prompt}
+                ],
+                "seed": 101  # Optional but helps with reproducibility
+            }
+
+            # Optional parameters if configured
+            if config.app.get("pollinations_private"):
+                payload["private"] = True
+            if config.app.get("pollinations_referrer"):
+                payload["referrer"] = config.app.get("pollinations_referrer")
+
+            headers = {
+                "Content-Type": "application/json"
+            }
+
+            # Make the API request
+            response = requests.post(base_url, headers=headers, json=payload)
+            response.raise_for_status()
+            result = response.json()
+
+            if result and "choices" in result and len(result["choices"]) > 0:
+                content = result["choices"][0]["message"]["content"]
+                return content.replace("\n", "")
+            else:
+                raise Exception(f"[{llm_provider}] returned an invalid response format")
+
+        except requests.exceptions.RequestException as e:
+            raise Exception(f"[{llm_provider}] request failed: {str(e)}")
+        except Exception as e:
+            raise Exception(f"[{llm_provider}] error: {str(e)}")
     else:
         raise ValueError(
             "llm_provider is not set, please set it in the config.toml file."
         )

-    if not api_key:
-        raise ValueError(
-            f"{llm_provider}: api_key is not set, please set it in the config.toml file."
-        )
-    if not model_name:
-        raise ValueError(
-            f"{llm_provider}: model_name is not set, please set it in the config.toml file."
-        )
-    if not base_url:
-        raise ValueError(
-            f"{llm_provider}: base_url is not set, please set it in the config.toml file."
-        )
+    if llm_provider not in ["pollinations", "ollama"]:  # Skip validation for providers that don't require API key
+        if not api_key:
+            raise ValueError(
+                f"{llm_provider}: api_key is not set, please set it in the config.toml file."
+            )
+        if not model_name:
+            raise ValueError(
+                f"{llm_provider}: model_name is not set, please set it in the config.toml file."
+            )
+        if not base_url:
+            raise ValueError(
+                f"{llm_provider}: base_url is not set, please set it in the config.toml file."
+            )

     if llm_provider == "qwen":
         import dashscope
@@ -172,8 +211,6 @@ def _generate_response(prompt: str) -> str:
         return generated_text

     if llm_provider == "cloudflare":
-        import requests
-
         response = requests.post(
             f"https://api.cloudflare.com/client/v4/accounts/{account_id}/ai/run/{model_name}",
             headers={"Authorization": f"Bearer {api_key}"},
@@ -192,20 +229,15 @@ def _generate_response(prompt: str) -> str:
         return result["result"]["response"]

     if llm_provider == "ernie":
-        import requests
-
-        params = {
-            "grant_type": "client_credentials",
-            "client_id": api_key,
-            "client_secret": secret_key,
-        }
-        access_token = (
-            requests.post(
-                "https://aip.baidubce.com/oauth/2.0/token", params=params
-            )
-            .json()
-            .get("access_token")
-        )
+        response = requests.post(
+            "https://aip.baidubce.com/oauth/2.0/token",
+            params={
+                "grant_type": "client_credentials",
+                "client_id": api_key,
+                "client_secret": secret_key,
+            }
+        )
+        access_token = response.json().get("access_token")
         url = f"{base_url}?access_token={access_token}"

         payload = json.dumps(
@@ -409,3 +441,4 @@ if __name__ == "__main__":
         )
         print("######################")
         print(search_terms)
+
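For reference, the pollinations branch above expects an OpenAI-compatible chat-completion reply. Below is a minimal, self-contained sketch of the same parsing logic; the `sample_response` dict is a hand-written illustration, not actual API output:

```python
# Stand-in for an OpenAI-compatible chat-completion reply (illustrative only).
sample_response = {
    "choices": [
        {"message": {"role": "assistant", "content": "A video script\nabout cats."}}
    ]
}


def extract_content(result: dict) -> str:
    # Same checks as the pollinations branch in _generate_response:
    # a non-empty "choices" list whose first entry carries message.content.
    if result and "choices" in result and len(result["choices"]) > 0:
        content = result["choices"][0]["message"]["content"]
        return content.replace("\n", "")  # the provider code strips newlines
    raise Exception("invalid response format")


print(extract_content(sample_response))  # -> "A video scriptabout cats."
```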
config.example.toml
@@ -32,6 +32,15 @@ pixabay_api_keys = []
 # ernie (文心一言)
 llm_provider = "openai"

+########## Pollinations AI Settings
+# Visit https://pollinations.ai/ to learn more
+# API Key is optional - leave empty for public access
+pollinations_api_key = ""
+# Default base URL for Pollinations API
+pollinations_base_url = "https://pollinations.ai/api/v1"
+# Default model for text generation
+pollinations_model_name = "openai-fast"
+
 ########## Ollama Settings
 # No need to set it unless you want to use your own proxy
 ollama_base_url = ""
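To show how these settings are consumed, here is a small sketch of the request construction performed in app/services/llm.py; the plain `app_config` dict stands in for MoneyPrinterTurbo's `config.app` object, and the values are illustrative assumptions:

```python
# Sketch only: app_config is a plain dict standing in for config.app.
app_config = {
    "pollinations_base_url": "",            # empty -> llm.py falls back to its default
    "pollinations_model_name": "openai-fast",
    "pollinations_private": True,           # optional flag, illustrative value
    "pollinations_referrer": "MoneyPrinterTurbo",  # optional, illustrative value
}

# Fallback mirrors the provider code: an empty base_url becomes the OpenAI-compatible endpoint.
base_url = app_config.get("pollinations_base_url", "") or "https://text.pollinations.ai/openai"

payload = {
    "model": app_config.get("pollinations_model_name", "openai-fast"),
    "messages": [{"role": "user", "content": "Write a short video script about cats"}],
    "seed": 101,
}
if app_config.get("pollinations_private"):
    payload["private"] = True
if app_config.get("pollinations_referrer"):
    payload["referrer"] = app_config.get("pollinations_referrer")

# The provider code POSTs this payload as JSON to base_url with
# a Content-Type: application/json header (see the requests.post call above).
print(base_url)
print(payload)
```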
requirements.txt
@@ -12,4 +12,5 @@ g4f==0.5.2.2
 azure-cognitiveservices-speech==1.41.1
 redis==5.2.0
 python-multipart==0.0.19
-pyyaml
+pyyaml
+requests>=2.31.0
BIN resource/fonts/Charm-Bold.ttf (new file, binary file not shown)
BIN resource/fonts/Charm-Regular.ttf (new file, binary file not shown)
webui/Main.py
@@ -237,6 +237,7 @@ if not config.app.get("hide_config", False):
             "OneAPI",
             "Cloudflare",
             "ERNIE",
+            "Pollinations",
         ]
         saved_llm_provider = config.app.get("llm_provider", "OpenAI").lower()
         saved_llm_provider_index = 0
@@ -381,6 +382,17 @@ if not config.app.get("hide_config", False):
             - **Base Url**: 填写 **请求地址** [点击查看文档](https://cloud.baidu.com/doc/WENXINWORKSHOP/s/jlil56u11#%E8%AF%B7%E6%B1%82%E8%AF%B4%E6%98%8E)
             """

+        if llm_provider == "pollinations":
+            if not llm_model_name:
+                llm_model_name = "default"
+            with llm_helper:
+                tips = """
+                ##### Pollinations AI Configuration
+                - **API Key**: Optional - Leave empty for public access
+                - **Base Url**: Default is https://text.pollinations.ai/openai
+                - **Model Name**: Use 'openai-fast' or specify a model name
+                """
+
         if tips and config.ui["language"] == "zh":
             st.warning(
                 "中国用户建议使用 **DeepSeek** 或 **Moonshot** 作为大模型提供商\n- 国内可直接访问,不需要VPN \n- 注册就送额度,基本够用"