mirror of https://github.com/NanmiCoder/MediaCrawler.git
synced 2025-11-25 03:15:17 +08:00
refactor: config update
main.py (73 changed lines)
@@ -1,12 +1,12 @@
 # Disclaimer: this code is for learning and research purposes only. Users must observe the following principles:
 # 1. It must not be used for any commercial purpose.
 # 2. Usage must comply with each target platform's terms of service and robots.txt rules.
 # 3. Do not crawl at large scale or disrupt the platforms' operation.
 # 4. Keep the request rate reasonable to avoid placing unnecessary load on the target platforms.
 # 5. It must not be used for any illegal or improper purpose.
 #
 # For the full license terms, see the LICENSE file in the project root.
 # By using this code you agree to the principles above and all terms of the LICENSE.


 import asyncio
@@ -34,41 +34,46 @@ class CrawlerFactory:
         "bili": BilibiliCrawler,
         "wb": WeiboCrawler,
         "tieba": TieBaCrawler,
-        "zhihu": ZhihuCrawler
+        "zhihu": ZhihuCrawler,
     }

     @staticmethod
     def create_crawler(platform: str) -> AbstractCrawler:
         crawler_class = CrawlerFactory.CRAWLERS.get(platform)
         if not crawler_class:
-            raise ValueError("Invalid Media Platform Currently only supported xhs or dy or ks or bili ...")
+            raise ValueError(
+                "Invalid Media Platform Currently only supported xhs or dy or ks or bili ..."
+            )
         return crawler_class()


+crawler: Optional[AbstractCrawler] = None
+
+
 async def main():
-    # Init crawler
-    crawler: Optional[AbstractCrawler] = None
-    try:
-        # parse cmd
-        await cmd_arg.parse_cmd()
-
-        # init db
-        if config.SAVE_DATA_OPTION in ["db", "sqlite"]:
-            await db.init_db()
-
-        crawler = CrawlerFactory.create_crawler(platform=config.PLATFORM)
-        await crawler.start()
-
-    finally:
-        if crawler:
-            await crawler.close()
-
-        if config.SAVE_DATA_OPTION in ["db", "sqlite"]:
-            await db.close()
+    global crawler
+
+    # parse cmd
+    await cmd_arg.parse_cmd()
+
+    # init db
+    if config.SAVE_DATA_OPTION in ["db", "sqlite"]:
+        await db.init_db()
+
+    crawler = CrawlerFactory.create_crawler(platform=config.PLATFORM)
+    await crawler.start()
+
+
+def cleanup():
+    if crawler:
+        asyncio.run(crawler.close())
+    if config.SAVE_DATA_OPTION in ["db", "sqlite"]:
+        asyncio.run(db.close())
+

-if __name__ == '__main__':
+if __name__ == "__main__":
     try:
         # asyncio.run(main())
         asyncio.get_event_loop().run_until_complete(main())
     except KeyboardInterrupt:
+        print("\n[main] Caught keyboard interrupt, exiting.")
         sys.exit()
+    finally:
+        cleanup()
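
Context for the change above: the commit hoists `crawler` to module scope and replaces the `try/finally` inside `main()` with a synchronous `cleanup()` hook that runs after the event loop has finished. Below is a minimal, self-contained sketch of that shutdown pattern; the stub crawler class and the single-entry `CRAWLERS` dict are illustrative stand-ins, not the repo's real imports or API.

import asyncio
import sys
from typing import Optional


class StubCrawler:
    """Hypothetical stand-in for a platform crawler (e.g. XiaoHongShuCrawler)."""

    async def start(self) -> None:
        print("crawling ...")

    async def close(self) -> None:
        print("closing crawler")


CRAWLERS = {
    "xhs": StubCrawler,  # trailing comma, matching the style fix in the commit
}

# Module-level handle so the synchronous cleanup() below can reach it.
crawler: Optional[StubCrawler] = None


async def main() -> None:
    global crawler
    crawler = CRAWLERS["xhs"]()
    await crawler.start()


def cleanup() -> None:
    # By the time this runs, run_until_complete() has returned (or raised),
    # so each close() is driven by a fresh event loop via asyncio.run().
    if crawler:
        asyncio.run(crawler.close())


if __name__ == "__main__":
    try:
        asyncio.get_event_loop().run_until_complete(main())
    except KeyboardInterrupt:
        print("\n[main] Caught keyboard interrupt, exiting.")
        sys.exit()
    finally:
        cleanup()  # still runs after sys.exit(), since SystemExit passes through finally

One design note: moving teardown out of main() means a KeyboardInterrupt that aborts the loop no longer skips the close calls, but objects created on the original loop are now closed on a new one, which only works if close() does not depend on loop-bound state.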