Files
ProxyPool/app/core/plugin_system/base.py
祀梦 635c524a7e refactor(backend): optimize database safety, validator performance, and scheduler concurrency
- Fix SQL injection risks in proxy_repo and task_repo
- Atomic acquire_pending with UPDATE ... RETURNING
- Reuse aiohttp ClientSession in ValidatorService
- Replace polling with asyncio.Event in SchedulerService
- Optimize ValidationQueue.drain with asyncio.Condition
- Concurrent plugin crawling with asyncio.gather
- Unify ProxyRaw model import path
- Fix test baseline and remove tracked __pycache__ files
2026-04-04 14:43:31 +08:00

43 lines
1.2 KiB
Python

"""插件基类 - 所有爬虫插件必须继承此基类"""
from abc import ABC, abstractmethod
from typing import Any, ClassVar, Dict, List

from app.models.domain import ProxyRaw
class BaseCrawlerPlugin(ABC):
    """Base class for crawler plugins — every crawler plugin must inherit it.

    Adding a new crawler only requires:
      1. Subclassing ``BaseCrawlerPlugin``.
      2. Implementing ``crawl()`` to return ``List[ProxyRaw]``.
      3. Registering it with ``@registry.register`` or explicitly in ``__init__``.
    """

    # Plugin metadata, overridden per subclass. ``ClassVar`` makes explicit
    # that these are class-level defaults, not per-instance fields — this
    # matters most for the mutable ``default_config`` dict below.
    name: ClassVar[str] = ""
    display_name: ClassVar[str] = ""
    description: ClassVar[str] = ""
    enabled: ClassVar[bool] = True
    # Shared class-level default; never mutated directly — ``__init__``
    # copies it into the instance so updates cannot leak across instances.
    default_config: ClassVar[Dict[str, Any]] = {}

    def __init__(self) -> None:
        # Defensive copy: per-instance config edits must not touch the
        # class-level ``default_config`` shared by all instances.
        self._config: Dict[str, Any] = dict(self.default_config or {})

    @property
    def config(self) -> Dict[str, Any]:
        """Current per-instance configuration."""
        return self._config

    def update_config(self, updates: Dict[str, Any]) -> None:
        """Update the plugin configuration, overwriting only existing keys.

        Keys not already present in the config are silently ignored, so
        callers cannot inject arbitrary configuration entries.
        """
        for key, value in updates.items():
            if key in self._config:
                self._config[key] = value

    @abstractmethod
    async def crawl(self) -> List[ProxyRaw]:
        """Core crawl method: fetch proxies only — do NOT validate here."""
        raise NotImplementedError

    async def health_check(self) -> bool:
        """Optional hook to report plugin health; defaults to healthy."""
        return True