Mirror of https://github.com/tgbot-collection/YYeTsBot.git, synced 2025-11-25 03:15:05 +08:00

move folder, bump
@@ -1,14 +1,14 @@
 requests==2.28.2
-pytelegrambotapi==4.9.0
+pytelegrambotapi==4.10.0
 beautifulsoup4==4.11.2
 tgbot-ping==1.0.7
-redis==4.4.2
+redis==4.5.1
 apscheduler==3.10.0
 pymongo==4.3.3
 tornado==6.2
 captcha==0.4
 passlib==1.7.4
-fakeredis==2.7.1
+fakeredis==2.8.0
 pytz==2022.7.1
 filetype==1.2.0
 requests[socks]
@@ -11,9 +11,10 @@ import logging
 import re
 import time
 
 import pymongo
 from tqdm import tqdm
 
+from common import Mongo
 
 logging.basicConfig(level=logging.INFO)
 
@@ -29,8 +30,7 @@ def extract_year(name: str) -> int:
     return r
 
 
-mongo_client = pymongo.MongoClient()
-col = mongo_client["zimuzu"]["yyets"]
+col = Mongo().client["zimuzu"]["yyets"]
 
 data = col.find()
 
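The same swap repeats across the maintenance scripts touched by this commit: each one drops its ad-hoc pymongo.MongoClient() and goes through the shared wrapper in common.py instead. A minimal sketch of the new call pattern — database and collection names come from the hunk above, the loop is illustrative only, and "views" is simply a field the old convert_db.py script is known to set:

from common import Mongo

col = Mongo().client["zimuzu"]["yyets"]
data = col.find()
for doc in data:
    # "views" is normalised to int by the (now deleted) convert_db.py importer
    print(doc["data"]["info"]["views"])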
@@ -7,9 +7,10 @@
 
 __author__ = "Benny <benny.think@gmail.com>"
 
-from common import Mongo
 from tqdm import tqdm
+
+from common import Mongo
 
 client = Mongo()
 user_col = client.db["users"]
 
@@ -9,10 +9,9 @@ __author__ = "Benny <benny.think@gmail.com>"
 
 import time
 
-import pymongo
+from common import Mongo
 
-client = pymongo.MongoClient()
+from bson import ObjectId
+client = Mongo().client
 
 comment = client["zimuzu"]["comment"]  # date
 users = client["zimuzu"]["users"]  # date
@@ -7,9 +7,10 @@
 
 __author__ = "Benny <benny.think@gmail.com>"
 
-import pymongo
+import os
 
+import pymongo
 
 
 class Mongo:
     def __init__(self):
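The hunk above shows only the class header plus the new import os; judging by how the rest of the commit uses the wrapper (Mongo().client[...] in the scripts and client.db["users"] above), it presumably looks roughly like the sketch below. The MONGOHOST variable name and its default are assumptions for illustration, not taken from this commit:

import os

import pymongo


class Mongo:
    def __init__(self):
        # assumed: the host comes from an environment variable, defaulting to localhost
        host = os.getenv("MONGOHOST", "localhost")
        self.client = pymongo.MongoClient(host, connect=False)
        # the scripts in this commit use both .client and .db
        self.db = self.client["zimuzu"]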
@@ -14,9 +14,8 @@ import sys
 logging.basicConfig(level=logging.INFO)
 lib_path = pathlib.Path(__file__).parent.parent.resolve().as_posix()
 sys.path.append(lib_path)
-from tqdm import tqdm
-
 from Mongo import DoubanMongoResource
+from tqdm import tqdm
 
 m = DoubanMongoResource()
 
@@ -10,10 +10,9 @@ __author__ = "Benny <benny.think@gmail.com>"
 import random
 from datetime import date, timedelta
 
-import pymongo
+from common import Mongo
 
-client = pymongo.MongoClient()
-col = client["zimuzu"]["metrics"]
+col = Mongo().client["zimuzu"]["metrics"]
 
 
 def generate_date_series(start: str, end: str) -> list:
@@ -7,15 +7,16 @@
 
 __author__ = "Benny <benny.think@gmail.com>"
 
-import openpyxl
 import pathlib
 import sys
 
+import openpyxl
+
 web_path = pathlib.Path(__file__).parent.parent.resolve().as_posix()
 sys.path.append(web_path)
 from Mongo import Mongo
 from tqdm import tqdm
-from utils import ts_date
+from yyetsweb.utils import ts_date
 
 wb = openpyxl.open("aliyun.xlsx")
 
@@ -1,62 +0,0 @@
-#!/usr/local/bin/python3
-# coding: utf-8
-
-# BagAndDrag - convert_db.py
-# 1/12/21 18:24
-#
-
-__author__ = "Benny <benny.think@gmail.com>"
-
-# convert to mongodb and con_sqlite
-
-import json
-from typing import List
-
-import pymongo
-import pymysql
-
-con_mysql = pymysql.Connect(host="127.0.0.1", user="root", password="root", charset="utf8mb4", database="yyets",
-                            cursorclass=pymysql.cursors.DictCursor
-                            )
-
-mongo_client = pymongo.MongoClient()
-
-SIZE = 2000
-
-
-def clear_mongodb():
-    mongo_client.drop_database("zimuzu")
-
-
-def mongodb_insert(data: List[dict]):
-    db = mongo_client["zimuzu"]
-    col = db["yyets"]
-    # deserialize data.data
-    inserted = []
-    for i in data:
-        api = json.loads(i["data"])
-        views = api["data"]["info"]["views"]
-        api["data"]["info"]["views"] = int(views)
-        inserted.append(api)
-    col.insert_many(inserted)
-
-
-def main():
-    clear_mongodb()
-
-    mysql_cur = con_mysql.cursor()
-
-    mysql_cur.execute("SELECT * FROM resource")
-
-    while True:
-        data = mysql_cur.fetchmany(SIZE)
-        if data:
-            mongodb_insert(data)
-        else:
-            break
-
-
-if __name__ == '__main__':
-    main()
-    con_mysql.close()
-    mongo_client.close()
@@ -1,45 +0,0 @@
-#!/usr/local/bin/python3
-# coding: utf-8
-
-# YYeTsBot - dump_kv.py
-# 2/6/21 18:12
-#
-
-__author__ = "Benny <benny.think@gmail.com>"
-
-import json
-import threading
-from concurrent.futures.thread import ThreadPoolExecutor
-
-import requests
-
-s = requests.Session()
-
-with open("index.json", ) as f:
-    ids = json.load(f)
-
-chunk = [ids[x:x + 3000] for x in range(0, len(ids), 3000)]
-
-
-def download(c):
-    print("running batch ", c[0])
-    for i in c:
-        data = s.get("https://yyets.dmesg.app/id={}".format(i)).json()
-        with open(f"{i}.json", "w") as f:
-            json.dump(data, f)
-
-
-if __name__ == '__main__':
-    threads = []
-    for part in chunk:
-        # Create 9 threads counting 10-19, 20-29, ... 90-99.
-        thread = threading.Thread(target=download, args=(part,))
-        threads.append(thread)
-
-    # Start them all
-    for thread in threads:
-        thread.start()
-
-    # Wait for all to complete
-    for thread in threads:
-        thread.join()
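The deleted dump_kv.py imports ThreadPoolExecutor but never uses it, spawning one bare Thread per 3000-id chunk instead. For reference only, a hedged sketch of the same fan-out through the executor it already imported — endpoint and output-file naming are copied from the script above, while max_workers and the function name are arbitrary choices, not anything from this commit:

import json
from concurrent.futures import ThreadPoolExecutor

import requests

s = requests.Session()


def download_one(i):
    # same endpoint and per-id output file as the deleted script
    data = s.get("https://yyets.dmesg.app/id={}".format(i)).json()
    with open(f"{i}.json", "w") as f:
        json.dump(data, f)


with open("index.json") as f:
    ids = json.load(f)

with ThreadPoolExecutor(max_workers=8) as pool:
    # the executor handles batching, so no manual chunking or join() is needed
    list(pool.map(download_one, ids))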
@@ -1,24 +0,0 @@
-#!/usr/local/bin/python3
-# coding: utf-8
-
-# YYeTsBot - load_from_kv.py
-# 2/6/21 18:27
-#
-
-__author__ = "Benny <benny.think@gmail.com>"
-
-import json
-import os
-
-import pymongo
-
-mongo_client = pymongo.MongoClient()
-
-data_files = [i for i in os.listdir("data/") if i.endswith(".json")]
-col = mongo_client["zimuzu"]["yyets"]
-for data_file in data_files:
-    with open(os.path.join("data", data_file)) as f:
-        d = json.load(f)
-    views = int(d["data"]["info"]["views"])
-    d["data"]["info"]["views"] = views
-    col.insert_one(d)
@@ -22,6 +22,7 @@ from tornado.log import enable_pretty_logging
 
 import dump_db
 from Mongo import OtherMongoResource, ResourceLatestMongoResource
+from commands.douban_sync import sync_douban
 from handler import (AnnouncementHandler, BlacklistHandler, CaptchaHandler,
                      CategoryHandler, CommentChildHandler, CommentHandler,
                      CommentNewestHandler, CommentReactionHandler,
@@ -34,7 +35,6 @@ from handler import (AnnouncementHandler, BlacklistHandler, CaptchaHandler,
                      ResourceHandler, ResourceLatestHandler,
                      SpamProcessHandler, TopHandler, TwitterOAuth2LoginHandler,
                      UserEmailHandler, UserHandler)
-from migration.douban_sync import sync_douban
 from utils import Cloudflare
 
 enable_pretty_logging()