# -*- coding: utf-8 -*-
# Copyright (c) 2025 relakkes@gmail.com
#
# This file is part of MediaCrawler project.
# Repository: https://github.com/NanmiCoder/MediaCrawler/blob/main/main.py
# GitHub: https://github.com/NanmiCoder
# Licensed under NON-COMMERCIAL LEARNING LICENSE 1.1
#
# Disclaimer: This code is for learning and research purposes only. Users must
# observe the following principles:
# 1. Do not use it for any commercial purposes.
# 2. Comply with each target platform's terms of service and robots.txt rules.
# 3. Do not crawl at large scale or disrupt the platforms' normal operation.
# 4. Keep the request rate reasonable to avoid placing unnecessary load on the
#    target platforms.
# 5. Do not use it for any illegal or improper purposes.
#
# See the LICENSE file in the project root for the full license terms.
# By using this code, you agree to the above principles and to all terms in the
# LICENSE.


import asyncio
import sys
import signal
from typing import Optional

import cmd_arg
import config
from database import db
from base.base_crawler import AbstractCrawler
from media_platform.bilibili import BilibiliCrawler
from media_platform.douyin import DouYinCrawler
from media_platform.kuaishou import KuaishouCrawler
from media_platform.tieba import TieBaCrawler
from media_platform.weibo import WeiboCrawler
from media_platform.xhs import XiaoHongShuCrawler
from media_platform.zhihu import ZhihuCrawler
from tools.async_file_writer import AsyncFileWriter
from var import crawler_type_var


class CrawlerFactory:
    CRAWLERS = {
        "xhs": XiaoHongShuCrawler,
        "dy": DouYinCrawler,
        "ks": KuaishouCrawler,
        "bili": BilibiliCrawler,
        "wb": WeiboCrawler,
        "tieba": TieBaCrawler,
        "zhihu": ZhihuCrawler,
    }

    @staticmethod
    def create_crawler(platform: str) -> AbstractCrawler:
        crawler_class = CrawlerFactory.CRAWLERS.get(platform)
        if not crawler_class:
            raise ValueError(
                f"Invalid media platform '{platform}'. "
                f"Supported platforms: {', '.join(CrawlerFactory.CRAWLERS)}"
            )
        return crawler_class()
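# A minimal usage sketch of the factory (this mirrors what main() does below;
# "xhs" is one of the keys in CRAWLERS):
#
#   crawler = CrawlerFactory.create_crawler(platform="xhs")
#   await crawler.start()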


crawler: Optional[AbstractCrawler] = None


# persist-1<persist1@126.com>
# Reason: added the --init_db option for database initialization.
# Side effects: none.
# Rollback strategy: revert this file.
async def main():
    # Init crawler
    global crawler

    # Parse command-line arguments
    args = await cmd_arg.parse_cmd()

    # Initialize the database if requested, then exit
    if args.init_db:
        await db.init_db(args.init_db)
        print(f"Database {args.init_db} initialized successfully.")
        return  # Exit the main function cleanly
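    # Example invocation of the init step (hypothetical backend name; the
    # accepted values for --init_db are whatever cmd_arg defines):
    #
    #   python main.py --init_db sqlite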

    # Create and start the crawler for the configured platform
    crawler = CrawlerFactory.create_crawler(platform=config.PLATFORM)
    await crawler.start()

    # Generate a wordcloud after crawling is complete (JSON save mode only)
    if config.SAVE_DATA_OPTION == "json" and config.ENABLE_GET_WORDCLOUD:
        try:
            file_writer = AsyncFileWriter(
                platform=config.PLATFORM,
                crawler_type=crawler_type_var.get(),
            )
            await file_writer.generate_wordcloud_from_comments()
        except Exception as e:
            print(f"Error generating wordcloud: {e}")


async def async_cleanup():
    """Async cleanup for asynchronous resources such as the CDP browser."""
    global crawler
    if crawler:
        # Clean up the CDP browser if one is running
        if hasattr(crawler, 'cdp_manager') and crawler.cdp_manager:
            try:
                await crawler.cdp_manager.cleanup(force=True)  # Force-clean the browser process
            except Exception as e:
                # Only report unexpected errors
                error_msg = str(e).lower()
                if "closed" not in error_msg and "disconnected" not in error_msg:
                    print(f"[Main] Error cleaning up CDP browser: {e}")

        # Otherwise, close the standard browser context (non-CDP mode only)
        elif hasattr(crawler, 'browser_context') and crawler.browser_context:
            try:
                # Only close the context if it is still open
                if hasattr(crawler.browser_context, 'pages'):
                    await crawler.browser_context.close()
            except Exception as e:
                # Only report unexpected errors
                error_msg = str(e).lower()
                if "closed" not in error_msg and "disconnected" not in error_msg:
                    print(f"[Main] Error closing browser context: {e}")

    # Close the database connection
    if config.SAVE_DATA_OPTION in ["db", "sqlite"]:
        await db.close()


def cleanup():
    """Synchronous cleanup entry point."""
    try:
        # Run the async cleanup on a fresh event loop
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
        loop.run_until_complete(async_cleanup())
        loop.close()
    except Exception as e:
        print(f"[Main] Error during cleanup: {e}")


def signal_handler(signum, _frame):
    """Handle interrupt signals such as Ctrl+C (SIGINT) and SIGTERM."""
    print(f"\n[Main] Received interrupt signal {signum}, cleaning up resources...")
    cleanup()
    sys.exit(0)


if __name__ == "__main__":
    # Register signal handlers
    signal.signal(signal.SIGINT, signal_handler)  # Ctrl+C
    signal.signal(signal.SIGTERM, signal_handler)  # Termination signal

    try:
        asyncio.run(main())
    except KeyboardInterrupt:
        print("\n[Main] Keyboard interrupt received, cleaning up resources...")
    finally:
        cleanup()