feat: fpw plugins, validation/crawl perf, WS stats, test DB isolation
- Add Free_Proxy_Website-style fpw_* plugins and register them
- Per-plugin crawl timeout (crawl_timeout_seconds=120); remove global crawl_timeout setting
- Validator: fix connect vs total timeout on save; SOCKS session LRU cache; drop redundant semaphore
- Validation handler uses single DB connection; batch upsert after crawl; WorkerPool put_nowait
- Remove unused max_retries from settings API/UI; settings maintenance SQL + init_db cleanup of deprecated keys
- WebSocket dashboard stats; ProxyList pool_filter and API alignment
- POST /api/proxies/delete-one for IPv6-safe deletes; task poll stops on 404
- pytest uses PROXYPOOL_DB_PATH=db/proxies.test.sqlite so tests do not wipe production DB
- .gitignore: explicit proxies.test.sqlite patterns; fix plugin_service ValidationException import

Made-with: Cursor
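The test-database isolation in the last two bullets hinges only on the PROXYPOOL_DB_PATH alias added to Settings below. A minimal sketch of how pytest could pin it, assuming a root-level conftest.py (the hook and cleanup here are illustrative, not the repo's actual test wiring):

```python
# conftest.py (illustrative only; the real test setup may differ)
import os
import pathlib

# Must run before Settings is instantiated so the app opens the throwaway DB.
os.environ.setdefault("PROXYPOOL_DB_PATH", "db/proxies.test.sqlite")


def pytest_sessionfinish(session, exitstatus):
    # Best-effort cleanup of the test database and its WAL/SHM sidecars.
    for suffix in ("", "-wal", "-shm"):
        pathlib.Path(f"db/proxies.test.sqlite{suffix}").unlink(missing_ok=True)
```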
@@ -1,6 +1,7 @@
 """Global configuration - Pydantic Settings with environment variable and .env file support"""
 import os
 from typing import List
+from pydantic import AliasChoices, Field
 from pydantic_settings import BaseSettings, SettingsConfigDict


@@ -11,8 +12,11 @@ class Settings(BaseSettings):
         extra="ignore",
     )

-    # Database configuration
-    db_path: str = "db/proxies.sqlite"
+    # Database configuration (PROXYPOOL_DB_PATH env var takes precedence, isolating pytest from production)
+    db_path: str = Field(
+        default="db/proxies.sqlite",
+        validation_alias=AliasChoices("PROXYPOOL_DB_PATH", "DB_PATH", "db_path"),
+    )

     # API server configuration
     host: str = "127.0.0.1"
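A quick way to see the alias order from the hunk above taking effect; the config module path here is an assumption, not confirmed by this diff:

```python
# Illustrative precedence check; "app.core.config" is a guessed module path.
import os

os.environ["PROXYPOOL_DB_PATH"] = "db/proxies.test.sqlite"

from app.core.config import Settings  # hypothetical location of the Settings class

assert Settings().db_path == "db/proxies.test.sqlite"  # env alias overrides the default
```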
@@ -31,6 +35,9 @@ class Settings(BaseSettings):
     log_level: str = "INFO"
     log_dir: str = "logs"

+    # WebSocket: stats broadcast interval (seconds); skip DB queries when no clients are connected
+    ws_stats_interval_seconds: int = 1
+
     # Export configuration
     export_max_records: int = 10000

@@ -54,10 +54,23 @@ async def init_db():
         await db.execute("UPDATE proxies SET created_at = CURRENT_TIMESTAMP WHERE created_at IS NULL")
         logger.info("Migrated: added created_at column")

+    # Migration: validated 0 = pending validation, 1 = validated and in the pool (takes part in score maintenance)
+    try:
+        await db.execute("SELECT validated FROM proxies LIMIT 1")
+    except Exception:
+        await db.execute(
+            "ALTER TABLE proxies ADD COLUMN validated INTEGER NOT NULL DEFAULT 0"
+        )
+        await db.execute(
+            "UPDATE proxies SET validated = 1 WHERE score > 0"
+        )
+        logger.info("Migrated: added validated column")
+
     await db.execute("CREATE INDEX IF NOT EXISTS idx_score ON proxies(score)")
     await db.execute("CREATE INDEX IF NOT EXISTS idx_protocol ON proxies(protocol)")
     await db.execute("CREATE INDEX IF NOT EXISTS idx_last_check ON proxies(last_check)")
     await db.execute("CREATE INDEX IF NOT EXISTS idx_ip_port ON proxies(ip, port)")
+    await db.execute("CREATE INDEX IF NOT EXISTS idx_validated ON proxies(validated)")

     # Plugin settings table
     await db.execute("""
@@ -94,6 +107,10 @@ async def init_db():
             updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
         )
     """)
+    # Only remove deprecated settings keys; proxies table data is untouched
+    await db.execute(
+        "DELETE FROM settings WHERE key IN ('crawl_timeout', 'max_retries')"
+    )

     await db.commit()
     logger.info("Database initialized")
@@ -112,6 +129,19 @@ async def get_db() -> AsyncIterator[aiosqlite.Connection]:
         await db.close()


+@asynccontextmanager
+async def get_db_connection() -> AsyncIterator[aiosqlite.Connection]:
+    """One connection spanning "read DB → await network I/O → write DB", so validation workers stop opening two connections per proxy."""
+    ensure_db_dir()
+    db = await aiosqlite.connect(DB_PATH)
+    try:
+        await db.execute("PRAGMA journal_mode=WAL")
+        await db.execute("PRAGMA synchronous=NORMAL")
+        yield db
+    finally:
+        await db.close()
+
+
 @asynccontextmanager
 async def transaction() -> AsyncIterator[aiosqlite.Connection]:
     """Get a database connection with explicit transaction control
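A sketch of the access pattern get_db_connection exists for, per its docstring: hold one connection across the read, the awaited proxy check, and the write instead of connecting twice per proxy. The validator object and the scoring/SQL details below are illustrative, not the repo's actual validation handler:

```python
# Illustrative single-connection validation step; column names follow the
# schema visible in init_db (score, validated, last_check), but the scoring
# rules and the validator API are placeholders.
from app.core.db import get_db_connection


async def validate_one(proxy, validator) -> None:
    async with get_db_connection() as db:
        # read: current score for this proxy
        rows = await db.execute_fetchall(
            "SELECT score FROM proxies WHERE ip = ? AND port = ?",
            (proxy.ip, proxy.port),
        )
        score = rows[0][0] if rows else 0

        # await network I/O: the actual connectivity check, no DB work in flight
        ok = await validator.check(proxy)

        # write: persist the outcome on the same connection
        if ok:
            await db.execute(
                "UPDATE proxies SET score = ?, validated = 1, last_check = CURRENT_TIMESTAMP "
                "WHERE ip = ? AND port = ?",
                (score + 1, proxy.ip, proxy.port),
            )
        else:
            await db.execute(
                "UPDATE proxies SET score = ?, last_check = CURRENT_TIMESTAMP "
                "WHERE ip = ? AND port = ?",
                (max(score - 1, 0), proxy.ip, proxy.port),
            )
        await db.commit()
```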
@@ -101,17 +101,51 @@ class CrawlJob(Job):
         result = await self.plugin_runner.run(plugin)
         proxies: List[ProxyRaw] = result.proxies if result else []

-        if proxies and self.validator_pool:
-            await self.validator_pool.submit(proxies)
-            logger.info(f"CrawlJob {self.id}: submitted {len(proxies)} proxies for validation")
+        if proxies:
+            from app.core.db import transaction
+            from app.repositories.proxy_repo import ProxyRepository
+
+            try:
+                async with transaction() as db:
+                    await ProxyRepository.upsert_many_from_crawl(db, proxies, 0)
+                logger.info(
+                    f"CrawlJob {self.id}: persisted {len(proxies)} crawled proxies as pending"
+                )
+            except Exception as e:
+                logger.error(
+                    f"CrawlJob {self.id}: failed to persist crawled proxies: {e}",
+                    exc_info=True,
+                )
+                raise
+
+        if proxies and self.validator_pool:
+            from app.core.db import get_db as _get_db
+            from app.repositories.settings_repo import (
+                SettingsRepository,
+                DEFAULT_SETTINGS,
+            )
+
+            async with _get_db() as db:
+                db_settings = await SettingsRepository.get_all(db)
+                if db_settings.get(
+                    "auto_validate_after_crawl",
+                    DEFAULT_SETTINGS["auto_validate_after_crawl"],
+                ):
+                    await self.validator_pool.submit(proxies)
+                    logger.info(
+                        f"CrawlJob {self.id}: submitted {len(proxies)} proxies for immediate validation"
+                    )

         crawl_failed = bool(result and (result.failure_count > 0 or result.error))
         payload = {
             "plugin_id": self.plugin_id,
             "proxy_count": len(proxies),
             "crawl_failed": crawl_failed,
             "error": result.error if result else None,
+            # Consistent with the persisted stats: success_count = proxies crawled this run, failure_count = failed flag (0/1)
+            "success_count": len(proxies),
+            "failure_count": result.failure_count if result else 0,
         }
         if result:
             payload["success_count"] = result.success_count
             payload["failure_count"] = result.failure_count
         self._set_completed(payload)
         return payload
@@ -133,7 +167,7 @@ class ValidateAllJob(Job):
         repo = self.proxy_repo or ProxyRepository()

         async with get_db() as db:
-            proxies = await repo.list_all(db)
+            proxies = await repo.list_for_validation(db)

         if not proxies:
             self._set_completed({"total": 0, "submitted": 0})
@@ -65,9 +65,12 @@ class AsyncWorkerPool:
         logger.info(f"{self.name} stopped")

     async def submit(self, items: List[T]) -> None:
-        """Submit a batch of items to the queue (block until a slot frees up - natural backpressure)"""
+        """Submit a batch of items to the queue (try put_nowait first, await put only when the queue is full)"""
         for item in items:
-            await self._queue.put(item)
+            try:
+                self._queue.put_nowait(item)
+            except asyncio.QueueFull:
+                await self._queue.put(item)

     async def drain(self) -> None:
         """Wait until every queued item has been consumed"""
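The submit change keeps the same backpressure but skips an await per item while the queue has room; only asyncio.QueueFull falls back to the blocking put. A self-contained toy run of that pattern (not the repo's AsyncWorkerPool):

```python
# Toy demonstration of the put_nowait-first submit pattern.
import asyncio


async def submit(queue: asyncio.Queue, items) -> None:
    for item in items:
        try:
            queue.put_nowait(item)   # fast path: no suspension while there is room
        except asyncio.QueueFull:
            await queue.put(item)    # slow path: wait for the consumer (backpressure)


async def main() -> None:
    q: asyncio.Queue = asyncio.Queue(maxsize=2)

    async def consumer() -> None:
        while True:
            item = await q.get()
            q.task_done()
            if item is None:
                return

    worker = asyncio.create_task(consumer())
    await submit(q, ["a", "b", "c", None])  # items beyond maxsize wait on put()
    await q.join()
    await worker


asyncio.run(main())
```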
@@ -18,6 +18,8 @@ class BaseCrawlerPlugin(ABC):
     description: str = ""
     enabled: bool = True
     default_config: Dict[str, Any] = {}
+    #: asyncio.wait_for cap (seconds) on a single plugin's entire crawl(); budgets are independent, so one slow plugin does not affect the others
+    crawl_timeout_seconds: float = 120.0

     def __init__(self):
         self._config: Dict[str, Any] = dict(self.default_config or {})
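The attribute only declares the budget; enforcement presumably lives in the plugin runner, which this diff does not show. A hedged sketch of the wrapping the comment describes (the runner function name and its failure handling are assumptions):

```python
# Illustrative enforcement of the per-plugin crawl budget.
import asyncio


async def run_plugin(plugin):
    try:
        # crawl_timeout_seconds caps this plugin's whole crawl(); other plugins
        # keep their own independent budgets.
        return await asyncio.wait_for(plugin.crawl(), timeout=plugin.crawl_timeout_seconds)
    except asyncio.TimeoutError:
        return []  # a slow source costs at most its own 120 s, nothing more
```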