fix: 修复爬虫网络层、验证队列卡死及 API 500 错误
- 修复 BaseHTTPPlugin 连接池、并发控制、异常日志、超时策略
- 修复/增强 8 个爬虫插件的稳定性和 fallback 机制
- 清理 validation_tasks 表 4 万+ pending 任务,避免队列卡死
- 修复 app/api/main.py 缺失全局 app 实例导致的 500 错误
- 提升前端 Axios 超时到 120 秒,避免请求断开
- 修复插件统计持久化和调度器生命周期问题
This commit is contained in:
@@ -13,27 +13,34 @@ class Fate0Plugin(BaseHTTPPlugin):
|
||||
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
self.urls = ["https://raw.githubusercontent.com/fate0/proxylist/master/proxy.list"]
|
||||
self.urls = [
|
||||
"https://raw.githubusercontent.com/fate0/proxylist/master/proxy.list",
|
||||
"https://cdn.jsdelivr.net/gh/fate0/proxylist@master/proxy.list",
|
||||
]
|
||||
|
||||
async def crawl(self) -> List[ProxyRaw]:
    """Fetch the fate0 proxy list (trying each mirror in order) and parse it.

    The source is a JSON-lines file: one JSON object per line with keys
    "host", "port" and "type" (protocol, defaulting to "http").

    Returns:
        List[ProxyRaw]: parsed proxies; empty list when every source is
        unreachable or no line parses successfully.
    """
    results: List[ProxyRaw] = []
    # Fix: initialize before the loop — if self.urls is empty, `html`
    # would otherwise be unbound and the check below raises NameError.
    html = None
    # Sequential fetch with fallback: stop at the first source that answers.
    for url in self.urls:
        html = await self.fetch(url, timeout=30)
        if html:
            break
    if not html:
        logger.warning(f"{self.display_name} 所有源均不可用")
        return results
    for line in html.split("\n"):
        line = line.strip()
        if not line:
            continue
        try:
            data = json.loads(line)
            ip = data.get("host")
            port = data.get("port")
            protocol = data.get("type", "http")
            if ip and port:
                results.append(ProxyRaw(ip, int(port), protocol))
        except Exception:
            # Malformed line (bad JSON / non-numeric port) — skip, best-effort parse.
            continue
    if results:
        logger.info(f"{self.display_name} 解析完成,获取 {len(results)} 个潜在代理")
    return results
Reference in New Issue
Block a user