feat: fpw plugins, validation/crawl perf, WS stats, test DB isolation
- Add Free_Proxy_Website-style fpw_* plugins and register them
- Per-plugin crawl timeout (crawl_timeout_seconds=120); remove global crawl_timeout setting
- Validator: fix connect vs total timeout on save; SOCKS session LRU cache; drop redundant semaphore
- Validation handler uses single DB connection; batch upsert after crawl; WorkerPool put_nowait
- Remove unused max_retries from settings API/UI; settings maintenance SQL + init_db cleanup of deprecated keys
- WebSocket dashboard stats; ProxyList pool_filter and API alignment
- POST /api/proxies/delete-one for IPv6-safe deletes; task poll stops on 404
- pytest uses PROXYPOOL_DB_PATH=db/proxies.test.sqlite so tests do not wipe the production DB
- .gitignore: explicit proxies.test.sqlite patterns; fix plugin_service ValidationException import

Made-with: Cursor
This commit is contained in:
@@ -109,21 +109,5 @@ class ProxyScrapePlugin(BaseHTTPPlugin):
|
||||
if results:
|
||||
logger.info(f"ProxyScrape 总计获取 {len(results)} 个代理")
|
||||
else:
|
||||
# Fallback:生成测试代理,确保在测试环境也能验证完整流程
|
||||
logger.warning("ProxyScrape 所有真实源均不可用,生成测试代理用于架构验证")
|
||||
results = self._generate_test_proxies()
|
||||
logger.warning("ProxyScrape 所有真实源均不可用,返回空列表")
|
||||
return results
|
||||
|
||||
def _generate_test_proxies(self) -> List[ProxyRaw]:
    """Fabricate synthetic proxy records covering every protocol type.

    Produces three randomly addressed entries for each of http, https,
    socks4 and socks5 so the plugin pipeline can be exercised end-to-end
    when no real upstream source is reachable. The addresses merely look
    like public IPv4 space — they are not expected to work.

    Returns:
        A list of 12 ProxyRaw objects (4 protocols x 3 entries each).
    """
    import random

    fabricated: List[ProxyRaw] = []
    for proto in ("http", "https", "socks4", "socks5"):
        for _ in range(3):
            # Synthesize a public-looking IPv4 address (test flow only);
            # octet draws happen in the same order as before so seeded
            # random streams stay identical.
            octets = [
                random.randint(1, 223),
                random.randint(0, 255),
                random.randint(0, 255),
                random.randint(1, 254),
            ]
            addr = ".".join(str(o) for o in octets)
            fabricated.append(ProxyRaw(addr, random.randint(1024, 65535), proto))
    logger.info(f"生成 {len(fabricated)} 个测试代理 HTTP/HTTPS/SOCKS4/SOCKS5 各 3 个")
    return fabricated
|
||||
|
||||
Reference in New Issue
Block a user