mirror of
https://github.com/tgbot-collection/YYeTsBot.git
synced 2025-11-25 11:29:38 +08:00
add zhuixinfan website for bot and website
This commit is contained in:
15
API.md
15
API.md
@@ -75,6 +75,17 @@
|
|||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
当数据库搜索不到资源时,会尝试从追新番和字幕侠搜索,返回如下
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"data": [],
|
||||||
|
"extra": {
|
||||||
|
"女人不杀生": "https://www.zimuxia.cn/portfolio/%e5%a5%b3%e4%ba%ba%e4%b8%8d%e6%9d%80%e7%94%9f"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
# Top
|
# Top
|
||||||
|
|
||||||
获取top信息,每类返回15条访问量最高的数据
|
获取top信息,每类返回15条访问量最高的数据
|
||||||
@@ -376,15 +387,13 @@
|
|||||||
|
|
||||||
* DELETE `/api/comment`,提交json数据
|
* DELETE `/api/comment`,提交json数据
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
"comment_id": "60cab935e9f929e09c91392a"
|
"comment_id": "60cab935e9f929e09c91392a"
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
不用关心comment_id是子评论还是父评论,会自动删除
|
|
||||||
|
|
||||||
|
不用关心comment_id是子评论还是父评论,会自动删除
|
||||||
|
|
||||||
返回被删除的数量,HTTP 200表示删除成功,404表示未找到这条留言
|
返回被删除的数量,HTTP 200表示删除成功,404表示未找到这条留言
|
||||||
|
|
||||||
|
|||||||
@@ -20,6 +20,8 @@ SHARE_API = "http://got002.com/api/v1/static/resource/detail?code={code}"
|
|||||||
# fix
|
# fix
|
||||||
FIX_RESOURCE = "https://www.zimuxia.cn/portfolio/{name}"
|
FIX_RESOURCE = "https://www.zimuxia.cn/portfolio/{name}"
|
||||||
FIX_SEARCH = "https://www.zimuxia.cn/?s={kw}"
|
FIX_SEARCH = "https://www.zimuxia.cn/?s={kw}"
|
||||||
|
ZHUIXINFAN_SEARCH = "http://www.fanxinzhui.com/list?k={}"
|
||||||
|
ZHUIXINFAN_RESOURCE = "http://www.fanxinzhui.com{}"
|
||||||
# cloudflare worker
|
# cloudflare worker
|
||||||
WORKERS = "https://yyets.dmesg.app/resource.html?id={id}"
|
WORKERS = "https://yyets.dmesg.app/resource.html?id={id}"
|
||||||
|
|
||||||
@@ -37,4 +39,4 @@ MONGO = os.environ.get("MONGO") or "mongo"
|
|||||||
MAINTAINER = os.environ.get("MAINTAINER")
|
MAINTAINER = os.environ.get("MAINTAINER")
|
||||||
REPORT = os.environ.get("REPORT") or False
|
REPORT = os.environ.get("REPORT") or False
|
||||||
# This name must match the class name, otherwise this bot won't function.
||||||
FANSUB_ORDER: str = os.environ.get("ORDER") or 'YYeTsOffline,ZimuxiaOnline'
|
FANSUB_ORDER: str = os.environ.get("ORDER") or 'YYeTsOffline,ZimuxiaOnline,ZhuixinfanOnline'
|
||||||
|
|||||||
@@ -10,7 +10,7 @@ import pickle
|
|||||||
import sys
|
import sys
|
||||||
import json
|
import json
|
||||||
import hashlib
|
import hashlib
|
||||||
import re
|
import contextlib
|
||||||
|
|
||||||
import requests
|
import requests
|
||||||
import pymongo
|
import pymongo
|
||||||
@@ -19,7 +19,8 @@ from bs4 import BeautifulSoup
|
|||||||
|
|
||||||
from config import (YYETS_SEARCH_URL, GET_USER, BASE_URL, SHARE_WEB,
|
from config import (YYETS_SEARCH_URL, GET_USER, BASE_URL, SHARE_WEB,
|
||||||
SHARE_URL, WORKERS, SHARE_API, USERNAME, PASSWORD,
|
SHARE_URL, WORKERS, SHARE_API, USERNAME, PASSWORD,
|
||||||
AJAX_LOGIN, REDIS, FANSUB_ORDER, FIX_SEARCH, MONGO)
|
AJAX_LOGIN, REDIS, FANSUB_ORDER, FIX_SEARCH, MONGO,
|
||||||
|
ZHUIXINFAN_SEARCH, ZHUIXINFAN_RESOURCE)
|
||||||
import redis
|
import redis
|
||||||
|
|
||||||
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(filename)s [%(levelname)s]: %(message)s')
|
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(filename)s [%(levelname)s]: %(message)s')
|
||||||
@@ -305,6 +306,51 @@ class ZimuxiaOffline(BaseFansub):
|
|||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class ZhuixinfanOnline(BaseFansub):
    """Fansub backend for fanxinzhui (zhuixinfan): search works, detail page is TODO."""

    label = "zhuixinfan online"

    def __get_search_html__(self, kw: str) -> str:
        """Fetch the raw HTML of the zhuixinfan search page for *kw*."""
        logging.info("[%s] Searching for %s", self.label, kw)
        response = session.get(ZHUIXINFAN_SEARCH.format(kw))
        response.close()
        return response.text

    def search_preview(self, search_text: str) -> dict:
        """Return {sha1(url): name, "source": label} for every hit on the search page.

        Each hit is also cached in redis (hash keyed by sha1 of the resource
        URL) so search_result can later resolve the hash back to a URL/name.
        """
        html_text = self.__get_search_html__(search_text)
        logging.info('[%s] Parsing html...', self.label)
        soup = BeautifulSoup(html_text, 'html.parser')

        dict_result = {}
        for resource_list in soup.find_all("ul", class_="resource_list"):
            for link in resource_list:
                # children without a <dd><a> (text nodes, separators) raise
                # AttributeError - just skip them
                with contextlib.suppress(AttributeError):
                    name = link.dd.text
                    url = ZHUIXINFAN_RESOURCE.format(link.dd.a["href"])
                    url_hash = hashlib.sha1(url.encode('u8')).hexdigest()
                    dict_result[url_hash] = name
                    self.redis.hset(
                        url_hash,
                        mapping={"class": self.__class__.__name__, "url": url, "name": name},
                    )

        dict_result["source"] = self.label
        return dict_result

    def search_result(self, resource_url: str) -> dict:
        """Resolve one preview hit into result data.

        Shape mirrors the other fansubs:
        {"all": dict_result, "share": share_link, "cnname": cnname}
        """
        self.url = resource_url
        self.data = self.__execute_search_result__()
        return self.data

    def __execute_search_result__(self) -> dict:
        """Build the detail payload for self.url from the redis cache."""
        logging.info("[%s] Loading detail page %s", self.label, self.url)
        url_hash = hashlib.sha1(self.url.encode('u8')).hexdigest()
        cnname = self.redis.hget(url_hash, "name")
        # TODO: detail-page scraping is not implemented yet; return placeholder
        self.data = {"all": "不好意思,还没做呢……", "share": self.url, "cnname": cnname}
        return self.data
|
|
||||||
|
|
||||||
class FansubEntrance(BaseFansub):
|
class FansubEntrance(BaseFansub):
|
||||||
order = FANSUB_ORDER.split(",")
|
order = FANSUB_ORDER.split(",")
|
||||||
|
|
||||||
@@ -357,6 +403,6 @@ for sub_name in globals().copy():
|
|||||||
vars()[cmd_name] = m
|
vars()[cmd_name] = m
|
||||||
|
|
||||||
if __name__ == '__main__':
    # Manual smoke test: run one preview search against the zimuxia backend.
    fansub = ZimuxiaOnline()
    preview = fansub.search_preview("女人为何")
    print(preview)
|
||||||
|
|||||||
@@ -7,13 +7,11 @@
|
|||||||
|
|
||||||
__author__ = "Benny <benny.think@gmail.com>"
|
__author__ = "Benny <benny.think@gmail.com>"
|
||||||
|
|
||||||
import uuid
|
|
||||||
|
|
||||||
import pymongo
|
import pymongo
|
||||||
import os
|
import os
|
||||||
import time
|
import time
|
||||||
from http import HTTPStatus
|
from http import HTTPStatus
|
||||||
from datetime import timedelta, date, datetime
|
from datetime import timedelta, date
|
||||||
from bson.objectid import ObjectId
|
from bson.objectid import ObjectId
|
||||||
|
|
||||||
import requests
|
import requests
|
||||||
@@ -21,8 +19,9 @@ from passlib.handlers.pbkdf2 import pbkdf2_sha256
|
|||||||
|
|
||||||
from database import (AnnouncementResource, BlacklistResource, CommentResource, ResourceResource,
|
from database import (AnnouncementResource, BlacklistResource, CommentResource, ResourceResource,
|
||||||
GrafanaQueryResource, MetricsResource, NameResource, OtherResource,
|
GrafanaQueryResource, MetricsResource, NameResource, OtherResource,
|
||||||
TopResource, UserLikeResource, UserResource, CaptchaResource)
|
TopResource, UserLikeResource, UserResource, CaptchaResource, Redis)
|
||||||
from utils import ts_date
|
from utils import ts_date
|
||||||
|
from fansub import ZhuixinfanOnline, ZimuxiaOnline
|
||||||
|
|
||||||
mongo_host = os.getenv("mongo") or "localhost"
|
mongo_host = os.getenv("mongo") or "localhost"
|
||||||
|
|
||||||
@@ -293,6 +292,29 @@ class NameMongoResource(NameResource, Mongo):
|
|||||||
|
|
||||||
|
|
||||||
class ResourceMongoResource(ResourceResource, Mongo):
|
class ResourceMongoResource(ResourceResource, Mongo):
|
||||||
|
redis = Redis().r
|
||||||
|
|
||||||
|
def _fansub_search(self, fansub_cls, kw):
    """Search *kw* on one fansub backend and return {name: url}.

    search_preview caches every hit in redis keyed by sha1(url) and tags the
    result dict with a "source" marker; strip the marker and resolve each
    remaining hash back to its cached URL. Returns an empty dict on no hits.
    """
    result = fansub_cls().search_preview(kw)
    # "source" is metadata injected by search_preview, not a real hit;
    # pop with a default so a missing marker cannot raise KeyError
    result.pop("source", None)
    # name as key, url as value
    return {name: self.redis.hget(key, "url") for key, name in result.items()}


def zhuixinfan_search(self, kw):
    """Fallback search on zhuixinfan; returns {name: url} (empty when no hit)."""
    # export OBJC_DISABLE_INITIALIZE_FORK_SAFETY=YES
    return self._fansub_search(ZhuixinfanOnline, kw)


def zimuxia_search(self, kw):
    """Fallback search on zimuxia; returns {name: url} (empty when no hit)."""
    return self._fansub_search(ZimuxiaOnline, kw)
|
||||||
|
|
||||||
def get_resource_data(self, resource_id: int, username: str) -> dict:
|
def get_resource_data(self, resource_id: int, username: str) -> dict:
|
||||||
data = self.db["yyets"].find_one_and_update(
|
data = self.db["yyets"].find_one_and_update(
|
||||||
{"data.info.id": resource_id},
|
{"data.info.id": resource_id},
|
||||||
@@ -320,7 +342,17 @@ class ResourceMongoResource(ResourceResource, Mongo):
|
|||||||
]},
|
]},
|
||||||
projection
|
projection
|
||||||
)
|
)
|
||||||
return dict(data=list(data))
|
data = list(data)
|
||||||
|
returned = {}
|
||||||
|
if data:
|
||||||
|
returned = dict(data=data)
|
||||||
|
returned["extra"] = []
|
||||||
|
else:
|
||||||
|
extra = self.zhuixinfan_search(keyword) or self.zimuxia_search(keyword)
|
||||||
|
returned["data"] = []
|
||||||
|
returned["extra"] = extra
|
||||||
|
|
||||||
|
return returned
|
||||||
|
|
||||||
|
|
||||||
class TopMongoResource(TopResource, Mongo):
|
class TopMongoResource(TopResource, Mongo):
|
||||||
|
|||||||
1
yyetsweb/config.py
Symbolic link
1
yyetsweb/config.py
Symbolic link
@@ -0,0 +1 @@
|
|||||||
|
../yyetsbot/config.py
|
||||||
@@ -27,7 +27,7 @@ class Redis:
|
|||||||
if os.getenv("DISABLE_REDIS"):
|
if os.getenv("DISABLE_REDIS"):
|
||||||
self.r = fakeredis.FakeStrictRedis()
|
self.r = fakeredis.FakeStrictRedis()
|
||||||
else:
|
else:
|
||||||
self.r = redis.StrictRedis(host="redis", decode_responses=True, db=2)
|
self.r = redis.StrictRedis(host="redis", decode_responses=True)
|
||||||
|
|
||||||
def __del__(self):
    # Best-effort cleanup: close the underlying redis connection when the
    # wrapper object is garbage-collected.
    self.r.close()
|
||||||
|
|||||||
1
yyetsweb/fansub.py
Symbolic link
1
yyetsweb/fansub.py
Symbolic link
@@ -0,0 +1 @@
|
|||||||
|
../yyetsbot/fansub.py
|
||||||
Reference in New Issue
Block a user