feat: fpw plugins, validation/crawl perf, WS stats, test DB isolation

- Add Free_Proxy_Website-style fpw_* plugins and register them
- Per-plugin crawl timeout (crawl_timeout_seconds=120); remove the global crawl_timeout setting (sketch below)
- Validator: fix connect vs. total timeout handling on save (sketch below); add a SOCKS session LRU cache; drop a redundant semaphore
- Validation handler uses a single DB connection; batch-upsert proxies after crawl; WorkerPool prefers put_nowait
- Remove unused max_retries from the settings API/UI; settings maintenance SQL plus init_db cleanup of deprecated keys
- WebSocket dashboard stats (sketch below); ProxyList pool_filter and API alignment
- POST /api/proxies/delete-one for IPv6-safe deletes (sketch below); task polling stops on 404
- pytest sets PROXYPOOL_DB_PATH=db/proxies.test.sqlite so tests cannot wipe the production DB (sketch below)
- .gitignore: explicit proxies.test.sqlite patterns; fix plugin_service ValidationException import
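
The per-plugin timeout above amounts to wrapping each plugin's crawl in
asyncio.wait_for. A minimal sketch, assuming a plugin object with an async
crawl() method; only the 120 s default comes from this commit, the runner
shape is illustrative:

    import asyncio

    async def run_plugin_with_timeout(plugin, timeout_s: float = 120.0):
        """Bound one plugin's crawl so a single slow source cannot stall the round."""
        try:
            return await asyncio.wait_for(plugin.crawl(), timeout=timeout_s)
        except asyncio.TimeoutError:
            # A timed-out plugin is skipped; the scheduler moves on to the next one.
            return None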
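
On the connect vs. total timeout fix: the bullet suggests the save path mixed
the two up, and they bound different things, so conflating them silently
changes validation strictness. A sketch of the distinction for HTTP checks,
assuming aiohttp (the connect_s/total_s names and the check shape are
illustrative):

    import asyncio

    import aiohttp

    async def check_http_proxy(proxy: str, url: str,
                               connect_s: float, total_s: float) -> bool:
        # sock_connect bounds only the TCP handshake; total bounds the whole request.
        timeout = aiohttp.ClientTimeout(total=total_s, sock_connect=connect_s)
        try:
            async with aiohttp.ClientSession(timeout=timeout) as session:
                async with session.get(url, proxy=proxy) as resp:
                    return resp.status == 200
        except (aiohttp.ClientError, asyncio.TimeoutError):
            return False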
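
For the WebSocket dashboard stats, a minimal push loop, assuming FastAPI; the
/ws/stats path, the 2 s interval, and the payload fields are illustrative, not
the project's actual schema:

    import asyncio

    from fastapi import FastAPI, WebSocket, WebSocketDisconnect

    app = FastAPI()

    @app.websocket("/ws/stats")
    async def ws_stats(ws: WebSocket) -> None:
        await ws.accept()
        try:
            while True:
                # The real handler would read these counts from the DB/pool.
                await ws.send_json({"total": 0, "alive": 0, "pending": 0})
                await asyncio.sleep(2)
        except WebSocketDisconnect:
            pass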
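
The dedicated delete endpoint exists because IPv6 literals contain ':' (and
'%zone' suffixes on link-local addresses), so a path such as
DELETE /api/proxies/{ip}:{port} is ambiguous or escape-dependent; a JSON body
avoids URL parsing entirely. A sketch assuming FastAPI, with an illustrative
model and a stand-in delete helper:

    from fastapi import FastAPI
    from pydantic import BaseModel

    app = FastAPI()

    class ProxyKey(BaseModel):
        ip: str   # e.g. "2001:db8::1" -- its colons would collide with :{port}
        port: int

    async def remove_proxy(ip: str, port: int) -> bool:
        # Stand-in for the real repository delete; illustrative only.
        return True

    @app.post("/api/proxies/delete-one")
    async def delete_one(key: ProxyKey) -> dict:
        return {"deleted": await remove_proxy(key.ip, key.port)}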
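
For the test DB isolation, the env var has to be set before the app reads its
configuration, which makes conftest.py module level the natural place. A
sketch; only the PROXYPOOL_DB_PATH name and the test path come from this
commit:

    # conftest.py
    import os
    import pathlib

    import pytest

    # Runs at import time, before the app module reads its settings.
    os.environ["PROXYPOOL_DB_PATH"] = "db/proxies.test.sqlite"

    @pytest.fixture(autouse=True)
    def clean_test_db():
        yield
        # Safe to delete: this path is the isolated test DB, never the production one.
        pathlib.Path(os.environ["PROXYPOOL_DB_PATH"]).unlink(missing_ok=True)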

Made-with: Cursor
Author: 祀梦
Date: 2026-04-05 13:39:19 +08:00
Parent: 92c7fa19e2
Commit: 0131c8b408
63 changed files with 2331 additions and 531 deletions


@@ -101,17 +101,51 @@ class CrawlJob(Job):
         result = await self.plugin_runner.run(plugin)
         proxies: List[ProxyRaw] = result.proxies if result else []
-        if proxies and self.validator_pool:
-            await self.validator_pool.submit(proxies)
-            logger.info(f"CrawlJob {self.id}: submitted {len(proxies)} proxies for validation")
+        if proxies:
+            from app.core.db import transaction
+            from app.repositories.proxy_repo import ProxyRepository
+
+            try:
+                async with transaction() as db:
+                    await ProxyRepository.upsert_many_from_crawl(db, proxies, 0)
+                logger.info(
+                    f"CrawlJob {self.id}: persisted {len(proxies)} crawled proxies as pending"
+                )
+            except Exception as e:
+                logger.error(
+                    f"CrawlJob {self.id}: failed to persist crawled proxies: {e}",
+                    exc_info=True,
+                )
+                raise
+
+        if proxies and self.validator_pool:
+            from app.core.db import get_db as _get_db
+            from app.repositories.settings_repo import (
+                SettingsRepository,
+                DEFAULT_SETTINGS,
+            )
+
+            async with _get_db() as db:
+                db_settings = await SettingsRepository.get_all(db)
+            if db_settings.get(
+                "auto_validate_after_crawl",
+                DEFAULT_SETTINGS["auto_validate_after_crawl"],
+            ):
+                await self.validator_pool.submit(proxies)
+                logger.info(
+                    f"CrawlJob {self.id}: submitted {len(proxies)} proxies for immediate validation"
+                )
         crawl_failed = bool(result and (result.failure_count > 0 or result.error))
         payload = {
             "plugin_id": self.plugin_id,
             "proxy_count": len(proxies),
             "crawl_failed": crawl_failed,
             "error": result.error if result else None,
+            # Matches the persisted stats: success_count = proxies crawled this run, failure_count = whether the crawl failed (0/1)
             "success_count": len(proxies),
             "failure_count": result.failure_count if result else 0,
         }
         if result:
             payload["success_count"] = result.success_count
             payload["failure_count"] = result.failure_count
         self._set_completed(payload)
         return payload
@@ -133,7 +167,7 @@ class ValidateAllJob(Job):
         repo = self.proxy_repo or ProxyRepository()
         async with get_db() as db:
-            proxies = await repo.list_all(db)
+            proxies = await repo.list_for_validation(db)
         if not proxies:
             self._set_completed({"total": 0, "submitted": 0})


@@ -65,9 +65,12 @@ class AsyncWorkerPool:
         logger.info(f"{self.name} stopped")

     async def submit(self, items: List[T]) -> None:
-        """Submit a batch of tasks to the queue (blocks until a slot frees up: natural backpressure)."""
+        """Submit a batch of tasks to the queue (try put_nowait first; await put once the queue is full)."""
         for item in items:
-            await self._queue.put(item)
+            try:
+                self._queue.put_nowait(item)
+            except asyncio.QueueFull:
+                await self._queue.put(item)

     async def drain(self) -> None:
         """Wait until all queued tasks have been consumed."""