ProxyPool/app/plugins/fpw_checkerproxy.py
祀梦 0131c8b408 feat: fpw plugins, validation/crawl perf, WS stats, test DB isolation
- Add Free_Proxy_Website-style fpw_* plugins and register them
- Per-plugin crawl timeout (crawl_timeout_seconds=120); remove global crawl_timeout setting
- Validator: fix connect vs total timeout on save; SOCKS session LRU cache; drop redundant semaphore
- Validation handler uses single DB connection; batch upsert after crawl; WorkerPool put_nowait
- Remove unused max_retries from settings API/UI; settings maintenance SQL + init_db cleanup of deprecated keys
- WebSocket dashboard stats; ProxyList pool_filter and API alignment
- POST /api/proxies/delete-one for IPv6-safe deletes; task poll stops on 404
- pytest uses PROXYPOOL_DB_PATH=db/proxies.test.sqlite so tests do not wipe production DB
- .gitignore: explicit proxies.test.sqlite patterns; fix plugin_service ValidationException import

Made-with: Cursor
2026-04-05 13:39:19 +08:00
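
The connect-vs-total fix above refers to the two distinct timeout budgets a validator should set: a short one for the TCP handshake (fail fast on dead hosts) and a longer one for the whole exchange. A minimal sketch of the idea with aiohttp; the URL, values, and function name are illustrative, not the project's actual validator:

import asyncio

import aiohttp

async def check_proxy(proxy_url: str) -> bool:
    # connect= bounds only the connection setup (dead hosts fail fast);
    # total= caps the entire request, including reading the body.
    timeout = aiohttp.ClientTimeout(total=10, connect=3)
    try:
        async with aiohttp.ClientSession(timeout=timeout) as session:
            async with session.get("http://httpbin.org/ip", proxy=proxy_url) as resp:
                await resp.read()
                return resp.status == 200
    except (aiohttp.ClientError, asyncio.TimeoutError):
        return False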
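
"Batch upsert after crawl" means one executemany over a single connection instead of a round trip per proxy. A sketch assuming a SQLite proxies table with a UNIQUE(ip, port, protocol) constraint and a last_seen column — the schema names are assumptions, not the project's:

import sqlite3
from typing import Iterable, Tuple

def upsert_proxies(conn: sqlite3.Connection, rows: Iterable[Tuple[str, int, str]]) -> None:
    # ON CONFLICT (SQLite >= 3.24) turns re-crawled duplicates into updates,
    # so repeated crawls stay idempotent without a SELECT-then-INSERT dance.
    conn.executemany(
        """
        INSERT INTO proxies (ip, port, protocol)
        VALUES (?, ?, ?)
        ON CONFLICT (ip, port, protocol)
        DO UPDATE SET last_seen = CURRENT_TIMESTAMP
        """,
        list(rows),
    )
    conn.commit()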
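
The dedicated POST delete endpoint exists because an IPv6 literal like 2001:db8::1 is full of colons and cannot safely ride in an ip:port path segment; a JSON body avoids URL parsing entirely. A FastAPI-flavoured sketch — the model name and the commented-out repository call are hypothetical:

from fastapi import FastAPI, HTTPException
from pydantic import BaseModel

app = FastAPI()

class DeleteOneRequest(BaseModel):
    ip: str            # "2001:db8::1" arrives intact in the JSON body
    port: int
    protocol: str = "http"

@app.post("/api/proxies/delete-one")
async def delete_one(req: DeleteOneRequest):
    # deleted = await proxy_repo.delete(req.ip, req.port, req.protocol)  # hypothetical repo call
    deleted = True
    if not deleted:
        raise HTTPException(status_code=404, detail="proxy not found")
    return {"deleted": True}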
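
Pointing tests at proxies.test.sqlite is usually a one-liner in conftest.py, set before anything imports the app so the path is read at startup. A sketch, assuming the app resolves PROXYPOOL_DB_PATH at import or first use:

# conftest.py
import os
import pathlib

# Must happen before the app package is imported, so it lives at module level.
os.environ["PROXYPOOL_DB_PATH"] = "db/proxies.test.sqlite"

import pytest

@pytest.fixture(scope="session", autouse=True)
def _drop_test_db():
    yield
    # Remove the throwaway DB once the whole test session is done.
    pathlib.Path("db/proxies.test.sqlite").unlink(missing_ok=True)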

66 lines · 2.2 KiB · Python

"""checkerproxy.net尝试常见导出路径 + 正文中的 ip:port排除示例占位"""
import re
from typing import List, Set, Tuple
from app.core.plugin_system import ProxyRaw
from app.plugins.base import BaseHTTPPlugin
from app.core.log import logger
class FpwCheckerproxyPlugin(BaseHTTPPlugin):
name = "fpw_checkerproxy"
display_name = "CheckerProxy.net"
description = "checkerproxy.net无稳定公开 API 时可能为空;多路径尝试)"
def __init__(self):
super().__init__()
self.urls = [
"https://checkerproxy.net/",
"https://checkerproxy.net/export",
"https://checkerproxy.net/api/export",
]
@staticmethod
def _parse_ip_ports(text: str) -> List[ProxyRaw]:
bad = {"123.123.123.123", "127.0.0.1", "0.0.0.0"}
seen: Set[Tuple[str, int]] = set()
out: List[ProxyRaw] = []
for m in re.finditer(
r"\b(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}):(\d{2,5})\b",
text,
):
ip, ps = m.group(1), m.group(2)
if ip in bad:
continue
if not ps.isdigit() or not (1 <= int(ps) <= 65535):
continue
key = (ip, int(ps))
if key in seen:
continue
seen.add(key)
try:
out.append(ProxyRaw(ip, int(ps), "http"))
except ValueError:
continue
return out
async def crawl(self) -> List[ProxyRaw]:
merged: List[ProxyRaw] = []
seen: Set[Tuple[str, int, str]] = set()
htmls = await self.fetch_all(self.urls, timeout=12, retries=1)
for html in htmls:
if not html or len(html) < 200:
continue
for p in self._parse_ip_ports(html):
k = (p.ip, p.port, p.protocol)
if k not in seen:
seen.add(k)
merged.append(p)
if len(merged) >= 50:
break
if merged:
logger.info(f"{self.display_name} 解析 {len(merged)}")
else:
logger.warning(f"{self.display_name} 未解析到代理(站点可能仅提供在线检测)")
return merged
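
To exercise the plugin in isolation, a small driver is enough (assuming BaseHTTPPlugin needs no extra constructor arguments, as the __init__ above suggests):

import asyncio

from app.plugins.fpw_checkerproxy import FpwCheckerproxyPlugin

async def main() -> None:
    plugin = FpwCheckerproxyPlugin()
    proxies = await plugin.crawl()
    for p in proxies[:10]:
        print(f"{p.protocol}://{p.ip}:{p.port}")

if __name__ == "__main__":
    asyncio.run(main())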