爬虫
This commit is contained in:
72
schoolNewsCrawler/crawler/RmrbHotPoint.py
Normal file
72
schoolNewsCrawler/crawler/RmrbHotPoint.py
Normal file
@@ -0,0 +1,72 @@
|
||||
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
People's Daily (Renmin Ribao) hot-point ranking crawler — command-line tool.

Usage: python RmrbHotPoint.py
"""

import argparse
import json
import sys
from pathlib import Path

# Add the parent directory to sys.path so the local `crawler` package is
# importable when this file is run directly as a script.
sys.path.insert(0, str(Path(__file__).parent.parent))

from crawler.RmrbCrawler import RmrbCrawler
from loguru import logger
|
||||
|
||||
|
||||
def main():
    """CLI entry point: fetch the People's Daily hot-point ranking and print it as JSON.

    Builds an argparse parser (currently only for ``--help``), runs the
    crawler, writes a JSON result envelope to stdout, and exits with
    code 0 on success or 1 on failure (including unexpected exceptions).

    Fix over the original: ``crawler.close()`` was only reached on the
    success path, leaking the crawler's resources whenever
    ``hotPointRank()`` or serialization raised. Cleanup now happens in a
    ``finally`` block.
    """
    parser = argparse.ArgumentParser(
        description='人民日报热点排行获取工具',
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
示例:
  python RmrbHotPoint.py
        """
    )

    # No options yet; parsing still gives users --help and rejects stray args.
    args = parser.parse_args()

    crawler = None
    try:
        # Create the crawler instance.
        logger.info("开始获取人民日报热点排行")
        crawler = RmrbCrawler()

        # Fetch the hot-point ranking.
        result = crawler.hotPointRank()

        # Build the JSON result envelope. `data` is always None here;
        # the ranking entries go into `dataList`.
        output = {
            "code": result.code,
            "message": result.message,
            "success": result.success,
            "data": None,
            "dataList": [item.dict() for item in result.dataList] if result.dataList else []
        }

        print(json.dumps(output, ensure_ascii=False, indent=2))

        # Exit code: success = 0, failure = 1. SystemExit is not an
        # Exception subclass, so the handler below does not swallow it.
        sys.exit(0 if result.success else 1)

    except Exception as e:
        logger.error(f"执行失败: {str(e)}")
        error_output = {
            "code": 500,
            "message": f"执行失败: {str(e)}",
            "success": False,
            "data": None,
            "dataList": []
        }
        print(json.dumps(error_output, ensure_ascii=False, indent=2))
        sys.exit(1)

    finally:
        # Always release the crawler's resources, even when fetching or
        # serialization raised. Closing must never mask the real outcome,
        # so a failure here is logged and suppressed.
        if crawler is not None:
            try:
                crawler.close()
            except Exception as close_err:
                logger.warning(f"关闭爬虫失败: {close_err}")
|
||||
|
||||
|
||||
# Run the CLI only when executed as a script, not when imported as a module.
if __name__ == "__main__":
    main()
|
||||
Reference in New Issue
Block a user