- Add Free_Proxy_Website-style fpw_* plugins and register them - Per-plugin crawl timeout (crawl_timeout_seconds=120); remove global crawl_timeout setting - Validator: fix connect vs total timeout on save; SOCKS session LRU cache; drop redundant semaphore - Validation handler uses single DB connection; batch upsert after crawl; WorkerPool put_nowait - Remove unused max_retries from settings API/UI; settings maintenance SQL + init_db cleanup of deprecated keys - WebSocket dashboard stats; ProxyList pool_filter and API alignment - POST /api/proxies/delete-one for IPv6-safe deletes; task poll stops on 404 - pytest uses PROXYPOOL_DB_PATH=db/proxies.test.sqlite so tests do not wipe production DB - .gitignore: explicit proxies.test.sqlite patterns; fix plugin_service ValidationException import Made-with: Cursor
62 lines
1.6 KiB
Python
62 lines
1.6 KiB
Python
"""领域模型 - 纯数据结构,不依赖任何框架"""
|
||
from dataclasses import dataclass, field
|
||
from datetime import datetime
|
||
from typing import List, Optional
|
||
|
||
|
||
@dataclass
class ProxyRaw:
    """Raw proxy record as scraped by a crawler plugin.

    ``__post_init__`` normalizes the protocol and validates the port so
    downstream code can rely on a clean (ip, port, protocol) triple.
    """

    ip: str
    port: int
    protocol: str = "http"

    def __post_init__(self):
        """Normalize ``protocol`` and validate ``port``.

        - protocol is lower-cased and stripped; unknown values fall back
          to ``"http"`` (same silent-normalization policy as the original).
        - a port given as a numeric string (e.g. ``"8080"``, common when
          scraping HTML tables) is coerced to ``int``.
        - ``bool`` is rejected explicitly: it subclasses ``int``, so
          ``port=True`` would otherwise slip through as port 1.

        Raises:
            ValueError: if ``port`` is not an int in 1..65535.
        """
        self.protocol = self.protocol.lower().strip()
        if self.protocol not in ("http", "https", "socks4", "socks5"):
            self.protocol = "http"
        # Coerce numeric strings so callers can feed scraped text directly.
        if isinstance(self.port, str) and self.port.strip().isdigit():
            self.port = int(self.port.strip())
        if (
            isinstance(self.port, bool)
            or not isinstance(self.port, int)
            or not (1 <= self.port <= 65535)
        ):
            raise ValueError(f"port must be between 1 and 65535, got {self.port}")
|
||
|
||
|
||
@dataclass
class Proxy:
    """Proxy entity as stored in the database.

    The trailing fields default to ``None``/``0`` because a freshly
    inserted row has not been checked yet.
    """

    ip: str
    port: int
    protocol: str
    score: int
    response_time_ms: Optional[float] = None  # latency of the last successful check, if any
    last_check: Optional[datetime] = None     # when the validator last probed this proxy
    created_at: Optional[datetime] = None     # row creation time
    # 0 = pending validation; 1 = validated (counts toward scoring and
    # is eligible to be handed out to consumers)
    validated: int = 0
|
||
|
||
|
||
@dataclass
class PluginInfo:
    """Metadata describing a crawler plugin.

    Counters start at zero and ``last_run`` is ``None`` until the plugin
    has executed at least once.
    """

    id: str
    name: str
    display_name: str
    description: str
    enabled: bool
    last_run: Optional[datetime] = None  # timestamp of the most recent run, if any
    success_count: int = 0               # cumulative successful runs
    failure_count: int = 0               # cumulative failed runs
|
||
|
||
|
||
@dataclass
class CrawlResult:
    """Result of one plugin crawl round.

    success_count: number of proxies scraped in the latest round
        (after de-duplication) — NOT the number that passed validation.
    failure_count: whether the latest crawl failed as a whole
        (1 on health-check failure / timeout / exception, otherwise 0).
    """

    plugin_name: str
    # Each instance gets its own list (default_factory avoids the shared
    # mutable-default pitfall).
    proxies: List[ProxyRaw] = field(default_factory=list)
    success_count: int = 0
    failure_count: int = 0
    error: Optional[str] = None  # human-readable failure reason, if any
|