- Add Free_Proxy_Website-style fpw_* plugins and register them
- Per-plugin crawl timeout (crawl_timeout_seconds=120); remove global crawl_timeout setting (see the sketch after this list)
- Validator: fix connect vs total timeout on save; SOCKS session LRU cache; drop redundant semaphore
- Validation handler uses single DB connection; batch upsert after crawl; WorkerPool put_nowait
- Remove unused max_retries from settings API/UI; settings maintenance SQL + init_db cleanup of deprecated keys
- WebSocket dashboard stats; ProxyList pool_filter and API alignment
- POST /api/proxies/delete-one for IPv6-safe deletes; task poll stops on 404
- pytest uses PROXYPOOL_DB_PATH=db/proxies.test.sqlite so tests do not wipe production DB
- .gitignore: explicit proxies.test.sqlite patterns; fix plugin_service ValidationException import

Made-with: Cursor
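The per-plugin timeout replaces the removed global crawl_timeout: each plugin carries its own crawl_timeout_seconds (declared on BaseCrawlerPlugin in the file below), and the scheduler bounds each crawl() call independently. A minimal sketch of how a runner might apply it, assuming a hypothetical run_plugin helper and the app.plugins.base module path:

import asyncio
import logging
from typing import List

from app.models.domain import ProxyRaw
from app.plugins.base import BaseCrawlerPlugin  # module path assumed from this file

logger = logging.getLogger(__name__)


async def run_plugin(plugin: BaseCrawlerPlugin) -> List[ProxyRaw]:
    """Hypothetical runner: bound one plugin's crawl() by its own timeout.

    A slow or hung plugin only times itself out; other plugins are unaffected.
    """
    try:
        return await asyncio.wait_for(plugin.crawl(), timeout=plugin.crawl_timeout_seconds)
    except asyncio.TimeoutError:
        logger.warning("plugin %s timed out after %.0fs", plugin.name, plugin.crawl_timeout_seconds)
        return []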
45 lines · 1.4 KiB · Python
"""插件基类 - 所有爬虫插件必须继承此基类"""
|
|
from abc import ABC, abstractmethod
|
|
from typing import List, Dict, Any
|
|
from app.models.domain import ProxyRaw
|
|
|
|
|
|
class BaseCrawlerPlugin(ABC):
|
|
"""爬虫插件基类
|
|
|
|
添加新爬虫只需:
|
|
1. 继承 BaseCrawlerPlugin
|
|
2. 实现 crawl() 方法返回 List[ProxyRaw]
|
|
3. 用 @registry.register 装饰或在 __init__ 中显式注册
|
|
"""
|
|
|
|
name: str = ""
|
|
display_name: str = ""
|
|
description: str = ""
|
|
enabled: bool = True
|
|
default_config: Dict[str, Any] = {}
|
|
#: 单插件整段 crawl() 的 asyncio.wait_for 上限(秒),彼此独立、互不影响
|
|
crawl_timeout_seconds: float = 120.0
|
|
|
|
def __init__(self):
|
|
self._config: Dict[str, Any] = dict(self.default_config or {})
|
|
|
|
@property
|
|
def config(self) -> Dict[str, Any]:
|
|
return self._config
|
|
|
|
def update_config(self, updates: Dict[str, Any]) -> None:
|
|
"""更新插件配置,只覆盖存在的键"""
|
|
for key, value in updates.items():
|
|
if key in self._config:
|
|
self._config[key] = value
|
|
|
|
@abstractmethod
|
|
async def crawl(self) -> List[ProxyRaw]:
|
|
"""爬取代理的核心方法。只负责爬取,不要在这里验证。"""
|
|
raise NotImplementedError
|
|
|
|
async def health_check(self) -> bool:
|
|
"""可选:检查插件健康状态"""
|
|
return True
|