import asyncio
import json
import os
import sys

# Make the project root importable when this file is run directly as a script.
# BUGFIX: the original call was missing one closing parenthesis (SyntaxError).
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

from core.crawler import BasePlugin
from core.log import logger


class Fate0Plugin(BasePlugin):
    """Crawler plugin for the fate0/proxylist aggregated proxy feed.

    The feed is newline-delimited JSON: one proxy object per line, each with
    at least ``host``, ``port`` and optionally ``type`` (protocol) keys.
    """

    def __init__(self):
        super().__init__()
        self.name = "Fate0聚合源"
        # A continuously updated, high-quality aggregated proxy list.
        self.urls = ["https://raw.githubusercontent.com/fate0/proxylist/master/proxy.list"]

    async def parse(self, html):
        """Yield ``(ip, port, protocol)`` tuples parsed from the fetched text.

        Empty or malformed lines are skipped silently; ``protocol`` defaults
        to ``"http"`` when the record carries no ``type`` field. Yields
        nothing when *html* is falsy (empty body or failed fetch).
        """
        if not html:
            return
        count = 0
        for line in html.split('\n'):
            if not line.strip():
                continue
            # Keep the try-body minimal: only the JSON decode can fail here.
            try:
                data = json.loads(line)
            except (json.JSONDecodeError, ValueError):
                continue
            # Guard against non-dict JSON values (e.g. a bare number/string).
            if not isinstance(data, dict):
                continue
            ip = data.get('host')
            port = data.get('port')
            protocol = data.get('type', 'http')
            if ip and port:
                # Ports may arrive as strings; skip records that are not
                # convertible to int rather than aborting the whole feed.
                try:
                    port = int(port)
                except (TypeError, ValueError):
                    continue
                yield ip, port, protocol
                count += 1
        if count > 0:
            logger.info(f"{self.name} 解析完成,获得 {count} 个潜在代理")


if __name__ == "__main__":
    async def test_plugin():
        # Manual smoke test: fetch the feed and print every parsed proxy.
        plugin = Fate0Plugin()
        print(f"========== 测试 {plugin.name} ==========")
        print(f"目标URL数量: {len(plugin.urls)}")
        print(f"开始抓取...\n")
        proxies = await plugin.run()
        print(f"\n========== 抓取结果 ==========")
        print(f"总计获取 {len(proxies)} 个代理:")
        print("-" * 60)
        for idx, (ip, port, protocol) in enumerate(proxies, 1):
            print(f"{idx:3d}. {ip:15s} : {str(port):5s} | {protocol}")
        print("-" * 60)
        print(f"完成!共 {len(proxies)} 个代理~")

    asyncio.run(test_plugin())