- Fix SQL injection risks in proxy_repo and task_repo
- Atomic acquire_pending with UPDATE ... RETURNING
- Reuse aiohttp ClientSession in ValidatorService
- Replace polling with asyncio.Event in SchedulerService
- Optimize ValidationQueue.drain with asyncio.Condition
- Concurrent plugin crawling with asyncio.gather
- Unify ProxyRaw model import path
- Fix test baseline and remove tracked __pycache__ files
40 lines
1.3 KiB
Python
40 lines
1.3 KiB
Python
import json
|
|
from typing import List
|
|
from app.core.plugin_system import ProxyRaw
|
|
from app.plugins.base import BaseHTTPPlugin
|
|
from app.core.log import logger
|
|
|
|
|
|
class Fate0Plugin(BaseHTTPPlugin):
    """Crawler for the fate0/proxylist GitHub feed.

    The feed is a raw text file where each non-empty line is a standalone
    JSON object describing one proxy (keys observed in SOURCE usage:
    ``host``, ``port``, ``type``).
    """

    default_config = {"max_pages": 5}
    name = "fate0"
    display_name = "Fate0聚合站"
    description = "来自 GitHub 持续更新的高质量代理聚合列表"

    def __init__(self):
        super().__init__()
        # Single raw-file source; newline-delimited JSON records.
        self.urls = ["https://raw.githubusercontent.com/fate0/proxylist/master/proxy.list"]

    async def crawl(self) -> List[ProxyRaw]:
        """Fetch each configured URL and parse its newline-delimited JSON.

        Returns:
            List[ProxyRaw]: one entry per valid record. ``protocol`` falls
            back to ``"http"`` when the record omits ``type``. Malformed
            lines, non-dict records, and out-of-range ports are skipped
            silently (the feed is best-effort, untrusted input).
        """
        results: List[ProxyRaw] = []
        for url in self.urls:
            body = await self.fetch(url, timeout=30)
            if not body:
                continue
            # splitlines() also strips '\r' on CRLF-formatted feeds,
            # unlike the previous split("\n").
            for line in body.splitlines():
                line = line.strip()
                if not line:
                    continue
                try:
                    data = json.loads(line)
                except ValueError:
                    # json.JSONDecodeError is a ValueError subclass;
                    # narrowed from a blanket `except Exception`.
                    continue
                if not isinstance(data, dict):
                    continue
                ip = data.get("host")
                protocol = data.get("type", "http")
                try:
                    port = int(data.get("port"))
                except (TypeError, ValueError):
                    # Missing port (None) or non-numeric value.
                    continue
                # Reject port 0 and anything above the TCP maximum.
                if ip and 0 < port <= 65535:
                    results.append(ProxyRaw(ip, port, protocol))
        if results:
            logger.info(f"{self.display_name} 解析完成,获取 {len(results)} 个潜在代理")
        return results