import sys
import os

# Make the project root importable when this plugin is run as a script.
# Must run before the `core.*` imports below, so it cannot move after them.
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

import asyncio

from core.crawler import BasePlugin
from core.log import logger


class ProxyListDownloadPlugin(BasePlugin):
    """Crawler plugin for the proxy-list.download plain-text API.

    The API returns one ``ip:port`` pair per line; the requested protocol
    is encoded in the URL query string (``type=http`` / ``type=https``).
    """

    def __init__(self):
        super().__init__()
        self.name = "ProxyListDownload"
        self.urls = [
            "https://www.proxy-list.download/api/v1/get?type=http",
            "https://www.proxy-list.download/api/v1/get?type=https",
        ]

    async def parse(self, html):
        """Async generator yielding ``(ip, port, protocol)`` tuples.

        Args:
            html: Raw response body (plain text, one ``ip:port`` per line),
                or a falsy value when the fetch failed.

        Yields:
            Tuples of ``(ip: str, port: int, protocol: str)``.
        """
        if not html:
            return

        # The API uses CRLF line endings; fall back to LF if splitting on
        # CRLF produced a single chunk.
        lines = html.split('\r\n')
        if len(lines) <= 1:
            lines = html.split('\n')

        # BUG FIX: the original test `'type=http' in self.current_url` is
        # also true for the https URL ('type=http' is a prefix of
        # 'type=https'), so every proxy was labeled 'http'.  Test for the
        # longer token instead.
        # NOTE(review): `self.current_url` is presumably set per-fetch by
        # BasePlugin.run — confirm against core.crawler.
        protocol = 'https' if 'type=https' in self.current_url else 'http'

        count = 0
        for line in lines:
            line = line.strip()
            if not line or ':' not in line:
                continue
            parts = line.split(':')
            if len(parts) < 2:
                continue
            ip, port = parts[0], parts[1]
            # Robustness: skip malformed lines (e.g. an HTML error page
            # slipping through) instead of crashing on int().
            if not port.isdigit():
                continue
            yield ip, int(port), protocol
            count += 1

        if count > 0:
            logger.info(f"{self.name} 解析完成,从 {self.current_url} 获得 {count} 个潜在代理")


if __name__ == "__main__":
    async def test_plugin():
        # Manual smoke test: fetch all plugin URLs and dump the results.
        plugin = ProxyListDownloadPlugin()
        print(f"========== 测试 {plugin.name} ==========")
        print(f"目标URL数量: {len(plugin.urls)}")
        print(f"开始抓取...\n")
        proxies = await plugin.run()
        print(f"\n========== 抓取结果 ==========")
        print(f"总计获取 {len(proxies)} 个代理:")
        print("-" * 60)
        for idx, (ip, port, protocol) in enumerate(proxies, 1):
            print(f"{idx:3d}. {ip:15s} : {str(port):5s} | {protocol}")
        print("-" * 60)
        print(f"完成!共 {len(proxies)} 个代理~")

    asyncio.run(test_plugin())