fix: repair crawler network layer, stuck validation queue, and API 500 errors

- Fix BaseHTTPPlugin connection pooling, concurrency control, exception logging, and timeout strategy (see the first sketch after this list)
- Fix and harden the stability and fallback mechanisms of 8 crawler plugins
- Clear 40,000+ pending tasks from the validation_tasks table to keep the queue from stalling (second sketch below)
- Fix the 500 errors caused by the missing global app instance in app/api/main.py (third sketch below)
- Raise the frontend Axios timeout to 120 seconds to avoid dropped requests
- Fix plugin statistics persistence and scheduler lifecycle issues
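
None of the BaseHTTPPlugin changes appear in the excerpt below, so as a hedged illustration of the first bullet, here is a minimal sketch of the usual shape of such a fix: one shared aiohttp session and connection pool, a semaphore for concurrency control, a per-request timeout, and logged rather than swallowed exceptions. Everything except the BaseHTTPPlugin, fetch, and fetch_all names is an assumption, not the project's actual code.

```python
# Hypothetical sketch of the BaseHTTPPlugin network-layer fix described above.
# Names such as `max_concurrency` and the aiohttp settings are assumptions.
import asyncio
import logging
from typing import List, Optional

import aiohttp

logger = logging.getLogger(__name__)


class BaseHTTPPlugin:
    def __init__(self, max_concurrency: int = 10):
        self._session: Optional[aiohttp.ClientSession] = None
        self._semaphore = asyncio.Semaphore(max_concurrency)  # concurrency control

    async def _get_session(self) -> aiohttp.ClientSession:
        # Reuse one session (and its connection pool) instead of opening
        # a fresh TCP connection for every request.
        if self._session is None or self._session.closed:
            connector = aiohttp.TCPConnector(limit=50, ttl_dns_cache=300)
            self._session = aiohttp.ClientSession(connector=connector)
        return self._session

    async def fetch(self, url: str, timeout: int = 15) -> Optional[str]:
        session = await self._get_session()
        try:
            async with self._semaphore:
                async with session.get(
                    url, timeout=aiohttp.ClientTimeout(total=timeout)
                ) as resp:
                    resp.raise_for_status()
                    return await resp.text()
        except Exception as exc:
            # Log instead of silently dropping the error, so dead sources show up.
            logger.warning("fetch failed for %s: %s", url, exc)
            return None

    async def fetch_all(self, urls: List[str], timeout: int = 15) -> List[Optional[str]]:
        # Fetch concurrently; a failed URL yields None so callers can fall back.
        return await asyncio.gather(*(self.fetch(u, timeout=timeout) for u in urls))
```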
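The queue cleanup itself is a one-off data fix; a minimal sketch of what it could look like with SQLAlchemy follows. The status column, its 'pending' value, and the DSN are assumptions; the actual commit may have done this in a migration or maintenance script.

```python
# Hypothetical sketch of the validation_tasks cleanup: drop the 40k+ stale
# pending rows so the validation queue can drain again.
from sqlalchemy import create_engine, text

engine = create_engine("sqlite:///proxy_pool.db")  # placeholder DSN

with engine.begin() as conn:
    deleted = conn.execute(
        text("DELETE FROM validation_tasks WHERE status = 'pending'")
    ).rowcount
    print(f"removed {deleted} stale pending tasks")
```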
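For the API fix: ASGI servers such as uvicorn import a module-level app object (for example `uvicorn app.api.main:app`), so if app/api/main.py stops exporting one, every request fails. A minimal sketch of restoring it, with the factory and route as assumptions:

```python
# Hypothetical sketch of the app/api/main.py fix. The factory and the
# /health route are illustrative assumptions, not the project's real code.
from fastapi import FastAPI


def create_app() -> FastAPI:
    app = FastAPI(title="proxy-pool API")

    @app.get("/health")
    async def health() -> dict:
        return {"status": "ok"}

    return app


# The module-level instance that was missing: `uvicorn app.api.main:app` needs it.
app = create_app()
```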
祀梦 committed 2026-04-04 19:27:36 +08:00
parent 635c524a7e
commit f09a8e16c4
19 changed files with 505 additions and 161 deletions


@@ -18,13 +18,18 @@ class SpeedXPlugin(BaseHTTPPlugin):
             "https://raw.githubusercontent.com/TheSpeedX/SOCKS-List/master/socks4.txt",
             "https://raw.githubusercontent.com/TheSpeedX/SOCKS-List/master/socks5.txt",
         ]
+        self.fallback_urls = [
+            "https://cdn.jsdelivr.net/gh/TheSpeedX/SOCKS-List@master/http.txt",
+            "https://cdn.jsdelivr.net/gh/TheSpeedX/SOCKS-List@master/socks4.txt",
+            "https://cdn.jsdelivr.net/gh/TheSpeedX/SOCKS-List@master/socks5.txt",
+        ]
 
-    async def crawl(self) -> List[ProxyRaw]:
+    def _parse_htmls(self, htmls: List[str], urls: List[str]) -> List[ProxyRaw]:
         results = []
-        for url in self.urls:
-            html = await self.fetch(url, timeout=30)
+        for idx, html in enumerate(htmls):
+            if not html:
+                continue
+            url = urls[idx]
             # Determine the protocol from the URL
             protocol = "http"
@@ -33,7 +38,7 @@ class SpeedXPlugin(BaseHTTPPlugin):
             elif "socks4" in url:
                 protocol = "socks4"
 
-            for line in html.split("\n"):
+            for line in html.splitlines():
                 line = line.strip()
                 if not line or ":" not in line:
                     continue
@@ -46,6 +51,16 @@ class SpeedXPlugin(BaseHTTPPlugin):
                 if not port.isdigit() or not (1 <= int(port) <= 65535):
                     continue
                 results.append(ProxyRaw(ip, int(port), protocol))
         return results
+
+    async def crawl(self) -> List[ProxyRaw]:
+        htmls = await self.fetch_all(self.urls, timeout=15)
+        results = self._parse_htmls(htmls, self.urls)
+        if not results:
+            logger.warning(f"{self.display_name} all GitHub sources returned empty, falling back to jsdelivr")
+            htmls = await self.fetch_all(self.fallback_urls, timeout=15)
+            results = self._parse_htmls(htmls, self.fallback_urls)
+        if results:
+            logger.info(f"{self.display_name} parsing done, collected {len(results)} candidate proxies")
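
As a note on the design choice: cdn.jsdelivr.net serves the same GitHub repository contents over a CDN (gh/user/repo@branch paths), which is why it works as a fallback when raw.githubusercontent.com is slow or unreachable. A hypothetical standalone check of the fallback path, assuming the plugin can be constructed without arguments:

```python
# Hypothetical usage sketch: run the plugin on its own to verify the
# GitHub -> jsdelivr fallback. The no-arg constructor is an assumption.
import asyncio


async def main() -> None:
    plugin = SpeedXPlugin()
    proxies = await plugin.crawl()  # tries GitHub first, then jsdelivr
    print(f"collected {len(proxies)} raw proxies")


asyncio.run(main())
```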