from typing import List

from core.plugin_system import ProxyRaw
from plugins.base import BaseHTTPPlugin
from core.log import logger


class ProxyListDownloadPlugin(BaseHTTPPlugin):
    """Crawler plugin for the proxy-list.download API.

    Each endpoint returns a plain-text body of ``ip:port`` pairs, one per
    line. The proxy protocol is not present in the response body; it is
    encoded in the URL's ``type=`` query parameter instead.
    """

    name = "proxylist_download"
    display_name = "ProxyListDownload"
    description = "从 ProxyListDownload API 获取代理"

    def __init__(self):
        super().__init__()
        # One endpoint per protocol supported by the upstream API.
        self.urls = [
            "https://www.proxy-list.download/api/v1/get?type=http",
            "https://www.proxy-list.download/api/v1/get?type=https",
            "https://www.proxy-list.download/api/v1/get?type=socks4",
            "https://www.proxy-list.download/api/v1/get?type=socks5",
        ]

    async def crawl(self) -> List[ProxyRaw]:
        """Fetch every configured endpoint and parse it into proxy candidates.

        Returns:
            A list of ``ProxyRaw`` entries. Endpoints whose download fails
            are skipped (``fetch`` is assumed to return a falsy value on
            failure — confirm against BaseHTTPPlugin).
        """
        results: List[ProxyRaw] = []
        for url in self.urls:
            html = await self.fetch(url, timeout=30)
            if not html:
                continue

            # Derive the protocol from the URL's "type=" query parameter;
            # check the specific types before the plain-http fallback.
            if "type=socks4" in url:
                protocol = "socks4"
            elif "type=socks5" in url:
                protocol = "socks5"
            elif "type=https" in url:
                protocol = "https"
            else:
                protocol = "http"

            # splitlines() handles \r\n, \n and bare \r uniformly,
            # replacing the fragile two-stage split on "\r\n" then "\n"
            # that broke on mixed line endings.
            for line in html.splitlines():
                line = line.strip()
                if not line or ":" not in line:
                    continue
                parts = line.split(":")
                if len(parts) < 2:
                    continue
                ip = parts[0].strip()
                port = parts[1].strip()
                # Bug fix: isdigit() alone accepted out-of-range values
                # such as "99999" or "0"; a valid TCP port is 1..65535.
                if ip and port.isdigit() and 0 < int(port) <= 65535:
                    results.append(ProxyRaw(ip, int(port), protocol))

        if results:
            logger.info(f"{self.display_name} 解析完成,获得 {len(results)} 个潜在代理")
        return results