import json
from typing import List

from app.core.plugin_system import ProxyRaw
from app.plugins.base import BaseHTTPPlugin
from app.core.log import logger


class Fate0Plugin(BaseHTTPPlugin):
    """Crawl plugin for the fate0/proxylist aggregated proxy feed on GitHub.

    The feed is a newline-delimited JSON file: one JSON object per line with
    at least ``host``, ``port`` and (optionally) ``type`` keys.
    """

    default_config = {"max_pages": 5}
    name = "fate0"
    display_name = "Fate0聚合站"
    description = "来自 GitHub 持续更新的高质量代理聚合列表"

    def __init__(self):
        super().__init__()
        # Primary GitHub raw URL first, jsDelivr CDN mirror as fallback.
        self.urls = [
            "https://raw.githubusercontent.com/fate0/proxylist/master/proxy.list",
            "https://cdn.jsdelivr.net/gh/fate0/proxylist@master/proxy.list",
        ]

    async def crawl(self) -> List[ProxyRaw]:
        """Fetch the proxy list from the first reachable mirror and parse it.

        Returns:
            A list of ``ProxyRaw(ip, port, protocol)`` entries; empty when no
            mirror responds or no line parses.
        """
        results: List[ProxyRaw] = []

        # Sequential fetch with fallback: stop at the first mirror that answers.
        # Initialize `html` so an empty url list cannot leave it unbound.
        html = None
        for url in self.urls:
            html = await self.fetch(url, timeout=30)
            if html:
                break
        if not html:
            logger.warning(f"{self.display_name} 所有源均不可用")
            return results

        # splitlines() tolerates both \n and \r\n line endings in the feed.
        for line in html.splitlines():
            line = line.strip()
            if not line:
                continue
            try:
                data = json.loads(line)
            except json.JSONDecodeError:
                # Malformed line in the upstream feed — skip it.
                continue
            if not isinstance(data, dict):
                # Valid JSON but not an object (e.g. a bare number) — skip,
                # matching the original broad-except tolerance.
                continue
            ip = data.get("host")
            port = data.get("port")
            protocol = data.get("type", "http")
            if ip and port:
                try:
                    results.append(ProxyRaw(ip, int(port), protocol))
                except (TypeError, ValueError):
                    # Non-numeric port value — skip rather than abort the parse.
                    continue

        if results:
            logger.info(f"{self.display_name} 解析完成,获取 {len(results)} 个潜在代理")
        return results