"""
从数据库读取URL并执行批量点击任务

支持单线程和并发两种模式

注意:此文件仅用于测试,生产环境请使用 main.py
"""
import sys
import time
from concurrent.futures import ThreadPoolExecutor, as_completed
from datetime import datetime
from pathlib import Path

from loguru import logger

from config import Config
from data_manager import DataManager
from task_executor import TaskExecutor
# ---- Logging configuration: thread-name tag, console sink + daily file sink ----
logger.remove()

# Format strings are split for readability only; the joined text is unchanged.
_CONSOLE_FORMAT = (
    "<green>{time:HH:mm:ss}</green> | <cyan>[{thread.name}]</cyan> | "
    "<level>{level: <8}</level> | <level>{message}</level>"
)
_FILE_FORMAT = (
    "{time:YYYY-MM-DD HH:mm:ss} | [{thread.name}] | {level: <8} | {message}"
)

# Console sink.
logger.add(
    sys.stdout,
    format=_CONSOLE_FORMAT,
    level="INFO",
)

# File sink: one file per day under ./logs, rotated at midnight.
log_dir = Path("./logs")
log_dir.mkdir(exist_ok=True)

logger.add(
    log_dir / "db_tasks_{time:YYYY-MM-DD}.log",
    format=_FILE_FORMAT,
    level="INFO",
    rotation="00:00",      # start a fresh log file at midnight
    retention="30 days",   # prune files older than 30 days
    encoding="utf-8",
)
# ==================== Main program ====================


if __name__ == "__main__":
    # Maximum number of worker threads (1 recommended to avoid resource
    # contention between browser/proxy sessions).
    MAX_WORKERS = 1

    # Whether tasks should run through the proxy.
    USE_PROXY = True

    # Cap on the number of sites to process (None = all of them).
    TEST_LIMIT = None

    # Per-task wait when harvesting a concurrent result, in seconds.
    TASK_TIMEOUT = 300

    banner = "=" * 70
    logger.info(banner)
    logger.info(" 从数据库读取任务并执行点击")
    logger.info(banner)
    logger.info(f"执行模式: {'并发' if MAX_WORKERS > 1 else '串行'}")
    logger.info(f"最大并发数: {MAX_WORKERS}")
    logger.info(f"使用代理: {USE_PROXY}")
    logger.info(f"测试数量: {TEST_LIMIT or '全部'}")
    logger.info(banner)
    logger.info("")

    # Executor that performs the actual click workflow for one site.
    executor = TaskExecutor(
        max_workers=MAX_WORKERS,
        use_proxy=USE_PROXY
    )

    # Load the active site list from the database.
    dm = DataManager()
    active_sites = dm.get_active_urls()

    if TEST_LIMIT:
        active_sites = active_sites[:TEST_LIMIT]

    logger.info(f"从数据库获取 {len(active_sites)} 个活跃站点")
    logger.info("")

    if not active_sites:
        logger.warning("❗ 没有找到活跃站点,退出")
        sys.exit(0)

    start_time = time.time()
    results = []

    if MAX_WORKERS == 1:
        # Serial mode: run the tasks one after another.
        logger.info("📊 串行模式,逐个执行...\n")
        for idx, site_info in enumerate(active_sites, 1):
            result = executor.execute_single_task(site_info, idx)
            results.append(result)
            logger.info("")
    else:
        # Concurrent mode: fan the tasks out across the thread pool.
        logger.info(f"🚀 并发模式,最大 {MAX_WORKERS} 个线程...\n")
        with ThreadPoolExecutor(max_workers=MAX_WORKERS) as pool:
            future_to_site = {
                pool.submit(executor.execute_single_task, site_info, idx): site_info
                for idx, site_info in enumerate(active_sites, 1)
            }

            # FIX: harvest with as_completed() instead of submission order, so a
            # slow early task no longer delays collection of finished results,
            # and use the future_to_site mapping (previously built but unused)
            # to say which site a failed task belonged to.  The pool's context
            # exit still joins all threads, as before.
            for future in as_completed(future_to_site):
                site_info = future_to_site[future]
                try:
                    result = future.result(timeout=TASK_TIMEOUT)
                    results.append(result)
                except Exception as e:
                    logger.error(f"任务执行异常: {str(e)} | 站点: {site_info}")

    # ---- Summary ----
    end_time = time.time()
    elapsed_time = end_time - start_time

    # NOTE(review): assumes each task result is a dict with a 'success' key —
    # confirm against TaskExecutor.execute_single_task.
    success_count = sum(1 for r in results if r['success'])
    fail_count = len(results) - success_count

    logger.info(banner)
    logger.info(" 执行结果")
    logger.info(banner)
    logger.info(f"总任务数: {len(results)}")
    logger.info(f"成功: {success_count}")
    logger.info(f"失败: {fail_count}")
    logger.info(f"总耗时: {elapsed_time:.2f}秒")
    logger.info(banner)