feat: fpw plugins, validation/crawl perf, WS stats, test DB isolation

- Add Free_Proxy_Website-style fpw_* plugins and register them
- Per-plugin crawl timeout (crawl_timeout_seconds=120); remove global crawl_timeout setting
- Validator: fix connect vs total timeout on save; SOCKS session LRU cache; drop redundant semaphore
- Validation handler uses single DB connection; batch upsert after crawl; WorkerPool put_nowait
- Remove unused max_retries from settings API/UI; add settings maintenance SQL and make init_db clean up deprecated keys
- WebSocket dashboard stats; ProxyList pool_filter and API alignment
- POST /api/proxies/delete-one for IPv6-safe deletes; task poll stops on 404
- pytest uses PROXYPOOL_DB_PATH=db/proxies.test.sqlite so tests do not wipe production DB
- .gitignore: explicit proxies.test.sqlite patterns; fix plugin_service ValidationException import

Made-with: Cursor
This commit is contained in:
祀梦
2026-04-05 13:39:19 +08:00
parent 92c7fa19e2
commit 0131c8b408
63 changed files with 2331 additions and 531 deletions

View File

@@ -4,10 +4,14 @@
"""
import pytest
from tests.task_utils import poll_task_until_terminal
class TestFullWorkflow:
"""测试完整工作流"""
@pytest.mark.network
@pytest.mark.slow
@pytest.mark.asyncio
async def test_proxy_management_workflow(self, client):
"""测试代理管理完整工作流
@@ -35,12 +39,18 @@ class TestFullWorkflow:
# 3. 触发所有插件爬取
response = await client.post("/api/plugins/crawl-all")
assert response.status_code == 200
crawl_result = response.json()["data"]
task_id = response.json()["data"]["task_id"]
task_data = await poll_task_until_terminal(
client, task_id, max_rounds=400, interval=0.5
)
assert task_data is not None
assert task_data["status"] in ("completed", "failed", "cancelled")
# 4. 获取更新后的统计
response = await client.get("/api/proxies/stats")
updated_stats = response.json()["data"]
assert "total" in initial_stats and "total" in updated_stats
# 5. 导出代理(所有格式)
for fmt in ["csv", "txt", "json"]:
response = await client.get(f"/api/proxies/export/{fmt}")
@@ -50,6 +60,8 @@ class TestFullWorkflow:
response = await client.delete("/api/proxies/clean-invalid")
assert response.status_code == 200
@pytest.mark.network
@pytest.mark.slow
@pytest.mark.asyncio
async def test_plugin_management_workflow(self, client):
"""测试插件管理完整工作流
@@ -93,6 +105,12 @@ class TestFullWorkflow:
# 6. 触发爬取
response = await client.post(f"/api/plugins/{plugin_id}/crawl")
assert response.status_code == 200
crawl_task_id = response.json()["data"]["task_id"]
crawl_task = await poll_task_until_terminal(
client, crawl_task_id, max_rounds=140, interval=0.5
)
assert crawl_task is not None
assert crawl_task["status"] in ("completed", "failed", "cancelled")
@pytest.mark.asyncio
async def test_scheduler_workflow(self, client):